mirror of https://github.com/ankitects/anki.git
synced 2025-09-18 22:12:21 -04:00

Compare commits (98 commits)
Commit SHAs, in order (author and date columns were not captured by this mirror):

3890e12c9e 80cff16250 75d9026be5 6854d13b88 29072654db ec6f09958a
c2957746f4 9e415869b8 7e8a1076c1 b97fb45e06 61094d387a 90ed4cc115
4506ad0c97 539054c34d cf12c201d8 3b0297d14d 58deb14028 5c4d2e87a1
6d31776c25 dda730dfa2 08431106da b4b1c2013f 5280cb2f1c b2ab0c0830
6a985c9fb0 db1d04f622 2491eb0316 06f9d41a96 8d5c385c76 153b972dfd
4ac80061ca 01b825f7c6 157da4c7a7 8ef208e418 65ea013270 ef1a1deb9c
c93e11f343 e3d0a30443 4fdb4983dd 3521da3ad6 ca60911e19 71ec878780
6dd9daf074 3b33d20849 542c557404 211cbfe660 359231a4d8 d23764b59e
1dc31bb360 6fa33777db 2fee6f959b 3d0a408a2b 3e846c8756 79932aad41
2879dc63c3 b92eabf4ae 1660a22548 a3b3b0850d 562cef1f22 e676e1a484
37f7872565 5c07c899ec 054740dd14 78a3b3ef7b f3b4284afb fb2e2bd37a
a0c1a398f4 d4862e99da 34ed674869 8c7cd80245 68bc4c02cf f4266f0142
d3e8dc6dbf 5462d99255 2d60471f36 62e01fe03a 5c6e2188e2 ab55440a05
aae9f53e79 a77ffbf4a5 402008950c f7e6e9cb0d 2b55882cce 0d0c42c6d9
b76918a217 f7974568c9 d13c117e80 8932199513 69d54864a8 baeccfa3e4
e99682a277 4dc00556c1 3dc6b6b3ca c947690aeb 1af3c58d40 46bcf4efa6
60750f8e4c 661f78557f
141 changed files with 5277 additions and 4786 deletions
.version (2 lines changed)
@@ -1 +1 @@
-25.07.5
+25.09.2
@@ -1 +1,2 @@
 nodeLinker: node-modules
+enableScripts: false
@@ -49,6 +49,7 @@ Sander Santema <github.com/sandersantema/>
 Thomas Brownback <https://github.com/brownbat/>
 Andrew Gaul <andrew@gaul.org>
 kenden
+Emil Hamrin <github.com/e-hamrin>
 Nickolay Yudin <kelciour@gmail.com>
 neitrinoweb <github.com/neitrinoweb/>
 Andreas Reis <github.com/nwwt>
@@ -236,6 +237,12 @@ Marvin Kopf <marvinkopf@outlook.com>
 Kevin Nakamura <grinkers@grinkers.net>
 Bradley Szoke <bradleyszoke@gmail.com>
 jcznk <https://github.com/jcznk>
+Thomas Rixen <thomas.rixen@student.uclouvain.be>
+Siyuan Mattuwu Yan <syan4@ualberta.ca>
+Lee Doughty <32392044+leedoughty@users.noreply.github.com>
+memchr <memchr@proton.me>
+Max Romanowski <maxr777@proton.me>
+Aldlss <ayaldlss@gmail.com>

 ********************
Cargo.lock (generated; 1114 lines changed)
File diff suppressed because it is too large.
@@ -33,9 +33,8 @@ git = "https://github.com/ankitects/linkcheck.git"
 rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"

 [workspace.dependencies.fsrs]
-version = "4.1.1"
+version = "5.1.0"
 # git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
-# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
 # path = "../open-spaced-repetition/fsrs-rs"

 [workspace.dependencies]
@@ -110,6 +109,7 @@ prost-types = "0.13"
 pulldown-cmark = "0.13.0"
 pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
 rand = "0.9.1"
+rayon = "1.10.0"
 regex = "1.11.1"
 reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
 rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
@@ -133,7 +133,7 @@ tokio-util = { version = "0.7.15", features = ["io"] }
 tower-http = { version = "0.6.6", features = ["trace"] }
 tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
 tracing-appender = "0.2.3"
-tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
+tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
 unic-langid = { version = "0.9.6", features = ["macros"] }
 unic-ucd-category = "0.9.0"
 unicode-normalization = "0.1.24"
@@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {

 pub fn check_rust(build: &mut Build) -> Result<()> {
     let inputs = inputs![
-        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"),
+        glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"),
         "Cargo.lock",
         "Cargo.toml",
         "rust-toolchain.toml",
@@ -49,6 +49,46 @@ pub trait BuildAction {
     }

     fn name(&self) -> &'static str {
-        std::any::type_name::<Self>().split("::").last().unwrap()
+        std::any::type_name::<Self>()
+            .split("::")
+            .last()
+            .unwrap()
+            .split('<')
+            .next()
+            .unwrap()
     }
 }
+
+#[cfg(test)]
+trait TestBuildAction {}
+
+#[cfg(test)]
+impl<T: TestBuildAction + ?Sized> BuildAction for T {
+    fn command(&self) -> &str {
+        "test"
+    }
+    fn files(&mut self, _build: &mut impl FilesHandle) {}
+}
+
+#[allow(dead_code, unused_variables)]
+#[test]
+fn should_strip_regions_in_type_name() {
+    struct Bare;
+    impl TestBuildAction for Bare {}
+    assert_eq!(Bare {}.name(), "Bare");
+
+    struct WithLifeTime<'a>(&'a str);
+    impl TestBuildAction for WithLifeTime<'_> {}
+    assert_eq!(WithLifeTime("test").name(), "WithLifeTime");
+
+    struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
+    impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
+    assert_eq!(
+        WithMultiLifeTime("test", "test").name(),
+        "WithMultiLifeTime"
+    );
+
+    struct WithGeneric<T>(T);
+    impl<T> TestBuildAction for WithGeneric<T> {}
+    assert_eq!(WithGeneric(3).name(), "WithGeneric");
+}
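The new name() above strips both the module path and any generic or lifetime arguments from a fully-qualified type name. A minimal Python sketch of the same string logic (short_name is a hypothetical helper, not part of the diff):

```python
def short_name(type_name: str) -> str:
    """Strip the module path ("a::b::C") and generic arguments ("C<'a, T>")."""
    return type_name.split("::")[-1].split("<")[0]

assert short_name("Bare") == "Bare"
assert short_name("build::actions::WithLifeTime<'a>") == "WithLifeTime"
assert short_name("ninja_gen::WithGeneric<i32>") == "WithGeneric"
print("ok")
```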
@@ -67,7 +67,7 @@ impl Platform {
 }

 /// Append .exe to path if on Windows.
-pub fn with_exe(path: &str) -> Cow<str> {
+pub fn with_exe(path: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{path}.exe").into()
     } else {
@@ -98,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
     }
 }

-fn with_cmd_ext(bin: &str) -> Cow<str> {
+fn with_cmd_ext(bin: &str) -> Cow<'_, str> {
     if cfg!(windows) {
         format!("{bin}.cmd").into()
     } else {
@@ -28,7 +28,11 @@ pub fn setup_yarn(args: YarnArgs) {
                 .arg("--ignore-scripts"),
         );
     } else {
-        run_command(Command::new(&args.yarn_bin).arg("install"));
+        run_command(
+            Command::new(&args.yarn_bin)
+                .arg("install")
+                .arg("--immutable"),
+        );
     }

     std::fs::write(args.stamp, b"").unwrap();
cargo/licenses.json (5958 lines changed)
File diff suppressed because it is too large.
@@ -1,35 +1,78 @@
-# This Dockerfile uses three stages.
-# 1. Compile anki (and dependencies) and build python wheels.
-# 2. Create a virtual environment containing anki and its dependencies.
-# 3. Create a final image that only includes anki's virtual environment and required
-# system packages.
-
-ARG PYTHON_VERSION="3.9"
+# This is a user-contributed Dockerfile. No official support is available.
 ARG DEBIAN_FRONTEND="noninteractive"

-# Build anki.
-FROM python:$PYTHON_VERSION AS build
-RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
-    > /usr/local/bin/bazel \
-    && chmod +x /usr/local/bin/bazel \
-    # Bazel expects /usr/bin/python
-    && ln -s /usr/local/bin/python /usr/bin/python
+FROM ubuntu:24.04 AS build
 WORKDIR /opt/anki
+ENV PYTHON_VERSION="3.13"
+
+# System deps
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    curl \
+    git \
+    build-essential \
+    pkg-config \
+    libssl-dev \
+    libbz2-dev \
+    libreadline-dev \
+    libsqlite3-dev \
+    libffi-dev \
+    zlib1g-dev \
+    liblzma-dev \
+    ca-certificates \
+    ninja-build \
+    rsync \
+    libglib2.0-0 \
+    libgl1 \
+    libx11-6 \
+    libxext6 \
+    libxrender1 \
+    libxkbcommon0 \
+    libxkbcommon-x11-0 \
+    libxcb1 \
+    libxcb-render0 \
+    libxcb-shm0 \
+    libxcb-icccm4 \
+    libxcb-image0 \
+    libxcb-keysyms1 \
+    libxcb-randr0 \
+    libxcb-shape0 \
+    libxcb-xfixes0 \
+    libxcb-xinerama0 \
+    libxcb-xinput0 \
+    libsm6 \
+    libice6 \
+    && rm -rf /var/lib/apt/lists/*
+
+# install rust with rustup
+RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+ENV PATH="/root/.cargo/bin:${PATH}"
+
+# Install uv and Python 3.13 with uv
+RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
+    && ln -s /root/.local/bin/uv /usr/local/bin/uv
+ENV PATH="/root/.local/bin:${PATH}"
+
+RUN uv python install ${PYTHON_VERSION} --default
+
 COPY . .
-# Build python wheels.
 RUN ./tools/build


 # Install pre-compiled Anki.
-FROM python:${PYTHON_VERSION}-slim as installer
+FROM python:3.13-slim AS installer
 WORKDIR /opt/anki/
-COPY --from=build /opt/anki/wheels/ wheels/
+COPY --from=build /opt/anki/out/wheels/ wheels/
 # Use virtual environment.
 RUN python -m venv venv \
     && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
     && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl


 # We use another build stage here so we don't include the wheels in the final image.
-FROM python:${PYTHON_VERSION}-slim as final
+FROM python:3.13-slim AS final
 COPY --from=installer /opt/anki/venv /opt/anki/venv
 ENV PATH=/opt/anki/venv/bin:$PATH
 # Install run-time dependencies.
@@ -59,9 +102,9 @@ RUN apt-get update \
     libxrender1 \
     libxtst6 \
     && rm -rf /var/lib/apt/lists/*

 # Add non-root user.
 RUN useradd --create-home anki
 USER anki
 WORKDIR /work
 ENTRYPOINT ["/opt/anki/venv/bin/anki"]
-LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"
@@ -1 +1 @@
-Subproject commit 939298f7c461407951988f362b1a08b451336a1e
+Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba
@@ -5,6 +5,11 @@ database-check-card-properties =
         [one] Fixed { $count } invalid card property.
        *[other] Fixed { $count } invalid card properties.
     }
+database-check-card-last-review-time-empty =
+    { $count ->
+        [one] Added last review time to { $count } card.
+       *[other] Added last review time to { $count } cards.
+    }
 database-check-missing-templates =
     { $count ->
         [one] Deleted { $count } card with missing template.
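For readers unfamiliar with Fluent, the added message is a select expression keyed on $count. A minimal sketch of how such a message resolves, using the third-party fluent.runtime package (an assumption here; Anki's own Fluent runtime lives on the Rust side):

```python
from fluent.runtime import FluentBundle, FluentResource

ftl = """
database-check-card-last-review-time-empty =
    { $count ->
        [one] Added last review time to { $count } card.
       *[other] Added last review time to { $count } cards.
    }
"""

bundle = FluentBundle(["en-US"], use_isolating=False)
bundle.add_resource(FluentResource(ftl))
msg = bundle.get_message("database-check-card-last-review-time-empty")
for n in (1, 3):
    text, _errors = bundle.format_pattern(msg.value, {"count": n})
    print(text)  # "Added last review time to 1 card." / "... 3 cards."
```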
@@ -384,8 +384,6 @@ deck-config-which-deck = Which deck would you like to display options for?
 deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
 deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
 deck-config-not-enough-history = Insufficient review history to perform this operation.
-deck-config-unable-to-determine-desired-retention =
-    Unable to determine a minimum recommended retention.
 deck-config-must-have-400-reviews =
     { $count ->
         [one] Only { $count } review was found.
@@ -394,7 +392,6 @@ deck-config-must-have-400-reviews =
 # Numbers that control how aggressively the FSRS algorithm schedules cards
 deck-config-weights = FSRS parameters
 deck-config-compute-optimal-weights = Optimize FSRS parameters
-deck-config-compute-minimum-recommended-retention = Minimum recommended retention
 deck-config-optimize-button = Optimize Current Preset
 # Indicates that a given function or label, provided via the "text" variable, operates slowly.
 deck-config-slow-suffix = { $text } (slow)
@@ -407,7 +404,6 @@ deck-config-historical-retention = Historical retention
 deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
 deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
 deck-config-get-params = Get Params
-deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
 deck-config-complete = { $num }% complete.
 deck-config-iterations = Iteration: { $count }...
 deck-config-reschedule-cards-on-change = Reschedule cards on change
@@ -468,12 +464,7 @@ deck-config-compute-optimal-weights-tooltip2 =
     By default, parameters will be calculated from the review history of all decks using the current preset. You can
     optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
     optimizing the parameters.
-deck-config-compute-optimal-retention-tooltip4 =
-    This tool will attempt to find the desired retention value
-    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
-    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
-    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
-    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
 deck-config-please-save-your-changes-first = Please save your changes first.
 deck-config-workload-factor-change = Approximate workload: {$factor}x
     (compared to {$previousDR}% desired retention)
@@ -505,7 +496,10 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
 # Description of the y axis in the FSRS simulation
 # diagram (Deck options -> FSRS) showing the total number of
 # cards that can be recalled or retrieved on a specific date.
-deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
+deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
+deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
+deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
+deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
 deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
 deck-config-simulate = Simulate
 deck-config-clear-last-simulate = Clear Last Simulation
@@ -519,6 +513,9 @@ deck-config-save-options-to-preset-confirm = Overwrite the options in your curre
 # to show the total number of cards that can be recalled or retrieved on a
 # specific date.
 deck-config-fsrs-simulator-radio-memorized = Memorized
+deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
+# $time here is pre-formatted e.g. "10 Seconds"
+deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card

 ## Messages related to the FSRS scheduler’s health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.

@@ -528,7 +525,7 @@ deck-config-health-check = Check health when optimizing
 deck-config-fsrs-bad-fit-warning = Health Check:
     Your memory is difficult for FSRS to predict. Recommendations:

-    - Suspend or reformulate leeches.
+    - Suspend or reformulate any cards you constantly forget.
     - Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
     - Understand before you memorize.

@@ -539,6 +536,17 @@ deck-config-fsrs-good-fit = Health Check:

 ## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.

+deck-config-unable-to-determine-desired-retention =
+    Unable to determine a minimum recommended retention.
+deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
+deck-config-compute-minimum-recommended-retention = Minimum recommended retention
+deck-config-compute-optimal-retention-tooltip4 =
+    This tool will attempt to find the desired retention value
+    that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
+    when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you’re
+    willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
+    is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
+deck-config-plotted-on-x-axis = (Plotted on the X-axis)
 deck-config-a-100-day-interval =
     { $days ->
         [one] A 100 day interval will become { $days } day.
@@ -46,6 +46,20 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
 studying-unbury = Unbury
 studying-what-would-you-like-to-unbury = What would you like to unbury?
 studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
+studying-card-studied-in-minute =
+    { $cards ->
+        [one] { $cards } card
+       *[other] { $cards } cards
+    } studied in
+    { $minutes ->
+        [one] { $minutes } minute.
+       *[other] { $minutes } minutes.
+    }
+studying-question-time-elapsed = Question time elapsed
+studying-answer-time-elapsed = Answer time elapsed
+
+## OBSOLETE; you do not need to translate this
+
 studying-card-studied-in =
     { $count ->
         [one] { $count } card studied in
@@ -56,5 +70,3 @@ studying-minute =
         [one] { $count } minute.
        *[other] { $count } minutes.
     }
-studying-question-time-elapsed = Question time elapsed
-studying-answer-time-elapsed = Answer time elapsed
@@ -1 +1 @@
-Subproject commit bc2da83c77749d96f3df8144f00c87d68dd2187a
+Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6
@@ -82,6 +82,7 @@
     "resolutions": {
         "canvas": "npm:empty-npm-package@1.0.0",
         "cookie": "0.7.0",
+        "devalue": "^5.3.2",
         "vite": "6"
     },
     "browserslist": [
@@ -40,12 +40,10 @@ message DeckConfigId {
 message GetRetentionWorkloadRequest {
   repeated float w = 1;
   string search = 2;
-  float before = 3;
-  float after = 4;
 }

 message GetRetentionWorkloadResponse {
-  float factor = 1;
+  map<uint32, float> costs = 1;
 }

 message GetIgnoredBeforeCountRequest {
@@ -219,6 +217,8 @@ message DeckConfigsForUpdate {
     bool review_today_active = 5;
     // Whether new_today applies to today or a past day.
     bool new_today_active = 6;
+    // Deck-specific desired retention override
+    optional float desired_retention = 7;
   }
   string name = 1;
   int64 config_id = 2;
@@ -83,6 +83,8 @@ message Deck {
   optional uint32 new_limit = 7;
   DayLimit review_limit_today = 8;
   DayLimit new_limit_today = 9;
+  // Deck-specific desired retention override
+  optional float desired_retention = 10;

   reserved 12 to 15;
 }
@@ -27,6 +27,9 @@ service FrontendService {
   rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
   // Warns python that the deck option web view is ready to receive requests.
   rpc deckOptionsReady(generic.Empty) returns (generic.Empty);
+
+  // Save colour picker's custom colour palette
+  rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
 }

 service BackendFrontendService {}
@@ -59,7 +59,7 @@ message AddNoteRequest {
 }

 message AddNoteResponse {
-  collection.OpChanges changes = 1;
+  collection.OpChangesWithCount changes = 1;
   int64 note_id = 2;
 }
@@ -55,6 +55,8 @@ service SchedulerService {
       returns (ComputeOptimalRetentionResponse);
   rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
       returns (SimulateFsrsReviewResponse);
+  rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
+      returns (SimulateFsrsWorkloadResponse);
   rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
   rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
       returns (EvaluateParamsResponse);
@@ -405,6 +407,8 @@ message SimulateFsrsReviewRequest {
   deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
   optional uint32 suspend_after_lapse_count = 12;
   float historical_retention = 13;
+  uint32 learning_step_count = 14;
+  uint32 relearning_step_count = 15;
 }

 message SimulateFsrsReviewResponse {
@@ -414,6 +418,12 @@ message SimulateFsrsReviewResponse {
   repeated float daily_time_cost = 4;
 }

+message SimulateFsrsWorkloadResponse {
+  map<uint32, float> cost = 1;
+  map<uint32, float> memorized = 2;
+  map<uint32, uint32> review_count = 3;
+}
+
 message ComputeOptimalRetentionResponse {
   float optimal_retention = 1;
 }
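The new response carries three parallel proto3 maps, which behave like dicts on the Python side. A hedged sketch of consuming them; the import path and the meaning of the uint32 keys (desired-retention values) are assumptions, not confirmed by this diff:

```python
from anki.scheduler_pb2 import SimulateFsrsWorkloadResponse  # assumed module path

resp = SimulateFsrsWorkloadResponse()
resp.cost[70] = 123.0        # e.g. seconds/day at key 70
resp.memorized[70] = 4200.0
resp.review_count[70] = 180

# proto3 map fields support normal dict-style iteration and lookup:
for key in sorted(resp.cost):
    ratio = resp.cost[key] / max(resp.memorized[key], 1.0)
    print(key, resp.cost[key], resp.memorized[key], round(ratio, 4))
```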
@@ -74,10 +74,15 @@ message SearchNode {
     repeated SearchNode nodes = 1;
     Joiner joiner = 2;
   }
+  enum FieldSearchMode {
+    FIELD_SEARCH_MODE_NORMAL = 0;
+    FIELD_SEARCH_MODE_REGEX = 1;
+    FIELD_SEARCH_MODE_NOCOMBINING = 2;
+  }
   message Field {
     string field_name = 1;
     string text = 2;
-    bool is_re = 3;
+    FieldSearchMode mode = 3;
   }

   oneof filter {
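Replacing the boolean is_re with a tri-state mode lets a field search also request no-combining matching. A hedged pylib-level sketch; the exact enum spelling in the generated Python code is an assumption:

```python
from anki.collection import SearchNode

# Regex search on a specific field; mode=...REGEX replaces the old is_re=True.
node = SearchNode(
    field=SearchNode.Field(
        field_name="Front",
        text="colou?r",
        mode=SearchNode.FieldSearchMode.FIELD_SEARCH_MODE_REGEX,
    )
)
# With an open collection: search = col.build_search_string(node)
```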
@@ -246,7 +246,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
         return BackendError(err.message, help_page, context, backtrace)

     elif val == kind.SEARCH_ERROR:
-        return SearchError(markdown(err.message), help_page, context, backtrace)
+        return SearchError(err.message, help_page, context, backtrace)

     elif val == kind.UNDO_EMPTY:
         return UndoEmpty(err.message, help_page, context, backtrace)
@@ -528,7 +528,7 @@ class Collection(DeprecatedNamesMixin):
     def new_note(self, notetype: NotetypeDict) -> Note:
        return Note(self, notetype)

-    def add_note(self, note: Note, deck_id: DeckId) -> OpChanges:
+    def add_note(self, note: Note, deck_id: DeckId) -> OpChangesWithCount:
         hooks.note_will_be_added(self, note, deck_id)
         out = self._backend.add_note(note=note._to_backend_note(), deck_id=deck_id)
         note.id = NoteId(out.note_id)
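The caller-visible effect of the OpChangesWithCount change: add_note() now reports how many cards the note generated. A small sketch assuming an open Collection named col with the stock Basic notetype and the default deck:

```python
from anki.decks import DeckId

note = col.new_note(col.models.by_name("Basic"))
note["Front"] = "2 + 2 = ?"
note["Back"] = "4"
changes = col.add_note(note, deck_id=DeckId(1))
print(changes.count)  # number of cards created, e.g. 1 for a Basic note
```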
@@ -175,8 +175,8 @@ class MnemoFact:
     def fact_view(self) -> type[MnemoFactView]:
         try:
             fact_view = self.cards[0].fact_view_id
-        except IndexError as err:
-            raise Exception(f"Fact {id} has no cards") from err
+        except IndexError:
+            return FrontOnly

         if fact_view.startswith("1.") or fact_view.startswith("1::"):
             return FrontOnly
@@ -187,7 +187,7 @@ class MnemoFact:
         elif fact_view.startswith("5.1"):
             return Cloze

-        raise Exception(f"Fact {id} has unknown fact view: {fact_view}")
+        raise Exception(f"Fact {self.id} has unknown fact view: {fact_view}")

     def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]:
         return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys]
@@ -73,7 +73,7 @@ langs = sorted(
         ("ଓଡ଼ିଆ", "or_OR"),
         ("Filipino", "tl"),
         ("ئۇيغۇر", "ug"),
-        ("Oʻzbek", "uz_UZ"),
+        ("Oʻzbekcha", "uz_UZ"),
     ]
 )
@@ -8,7 +8,7 @@ from collections.abc import Callable
 import aqt.editor
 import aqt.forms
 from anki._legacy import deprecated
-from anki.collection import OpChanges, SearchNode
+from anki.collection import OpChanges, OpChangesWithCount, SearchNode
 from anki.decks import DeckId
 from anki.models import NotetypeId
 from anki.notes import Note, NoteFieldsCheckResult, NoteId
@@ -294,13 +294,13 @@ class AddCards(QMainWindow):

         target_deck_id = self.deck_chooser.selected_deck_id

-        def on_success(changes: OpChanges) -> None:
+        def on_success(changes: OpChangesWithCount) -> None:
             # only used for detecting changed sticky fields on close
             self._last_added_note = note

             self.addHistory(note)

-            tooltip(tr.adding_added(), period=500)
+            tooltip(tr.importing_cards_added(count=changes.count), period=500)
             av_player.stop_and_clear_queue()
             self._load_new_note(sticky_fields_from=note)
             gui_hooks.add_cards_did_add_note(note)
@@ -10,6 +10,8 @@ import re
 from collections.abc import Callable, Sequence
 from typing import Any, cast

+from markdown import markdown
+
 import aqt
 import aqt.browser
 import aqt.editor
@@ -20,7 +22,7 @@ from anki.cards import Card, CardId
 from anki.collection import Collection, Config, OpChanges, SearchNode
 from anki.consts import *
 from anki.decks import DeckId
-from anki.errors import NotFoundError
+from anki.errors import NotFoundError, SearchError
 from anki.lang import without_unicode_isolation
 from anki.models import NotetypeId
 from anki.notes import NoteId
@@ -498,6 +500,8 @@ class Browser(QMainWindow):
         text = self.current_search()
         try:
             normed = self.col.build_search_string(text)
+        except SearchError as err:
+            showWarning(markdown(str(err)))
         except Exception as err:
             showWarning(str(err))
         else:
@@ -13,7 +13,7 @@ import aqt.browser
 from anki.cards import Card
 from anki.collection import Config
 from anki.tags import MARKED_TAG
-from aqt import AnkiQt, gui_hooks
+from aqt import AnkiQt, gui_hooks, is_mac
 from aqt.qt import (
     QCheckBox,
     QDialog,
@@ -81,10 +81,15 @@ class Previewer(QDialog):
         qconnect(self.finished, self._on_finished)
         self.silentlyClose = True
         self.vbox = QVBoxLayout()
+        spacing = 6
         self.vbox.setContentsMargins(0, 0, 0, 0)
+        self.vbox.setSpacing(spacing)
         self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER)
         self.vbox.addWidget(self._web)
         self.bbox = QDialogButtonBox()
+        self.bbox.setContentsMargins(
+            spacing, spacing if is_mac else 0, spacing, spacing
+        )
         self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight)

         gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER)
Binary file not shown (previously 727 B).

qt/aqt/data/qt/icons/media-record.svg (new file, 27 lines; 1.3 KiB)
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg width="21" height="21" viewBox="0 0 21 21" version="1.1"
+    xmlns="http://www.w3.org/2000/svg">
+    <g id="Layer-1" transform="translate(0.5,0.5)">
+        <rect x="0" y="0" width="20" height="20" fill="none"/>
+        <g transform="translate(14.8974,6.3648)">
+            <path d="M0,0C0,3.403 -2.042,6.161 -4.56,6.161C-7.078,6.161 -9.12,3.403 -9.12,0C-9.12,-3.403 -7.078,-6.161 -4.56,-6.161C-2.042,-6.161 0,-3.403 0,0"
+                fill="black" fill-rule="nonzero"/>
+        </g>
+        <g transform="matrix(0,-1,-1,0,10.3374,1.8048)">
+            <ellipse cx="-4.56" cy="0" rx="6.161" ry="4.56"
+                fill="none" stroke="black" stroke-width="0.25"/>
+        </g>
+        <g transform="translate(3.1987,14.4958)">
+            <path d="M0,-9.484C-0.76,-4.212 3.287,0 7.12,-0.046C10.864,-0.09 14.742,-4.199 14.076,-9.343"
+                fill="none" stroke="black" stroke-width="2" fill-rule="nonzero"/>
+        </g>
+        <g transform="matrix(-1,0,0,1,20.573,18.613)">
+            <rect x="5.387" y="0.601" width="9.799" height="0.185"
+                fill="none" stroke="black" stroke-width="2"/>
+        </g>
+        <g transform="matrix(-1,0,0,1,20.741,13.51)">
+            <rect x="9.899" y="1.163" width="0.943" height="4.164"
+                fill="none" stroke="black" stroke-width="2"/>
+        </g>
+    </g>
+</svg>
@@ -151,6 +151,7 @@ class Editor:
         self.add_webview()
         self.setupWeb()
         self.setupShortcuts()
+        self.setupColourPalette()
         gui_hooks.editor_did_init(self)

     # Initial setup
@@ -349,6 +350,14 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too
             keys, fn, _ = row
             QShortcut(QKeySequence(keys), self.widget, activated=fn)  # type: ignore

+    def setupColourPalette(self) -> None:
+        if not (colors := self.mw.col.get_config("customColorPickerPalette")):
+            return
+        for i, colour in enumerate(colors[: QColorDialog.customCount()]):
+            if not QColor.isValidColorName(colour):
+                continue
+            QColorDialog.setCustomColor(i, QColor.fromString(colour))
+
     def _addFocusCheck(self, fn: Callable) -> Callable:
         def checkFocus() -> None:
             if self.currentField is None:
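setupColourPalette restores the palette that save_custom_colours (added in the mediasrv diff below) persists under the customColorPickerPalette config key. A standalone round-trip sketch of the same Qt calls; the saved values here are made up for illustration:

```python
from PyQt6.QtGui import QColor
from PyQt6.QtWidgets import QApplication, QColorDialog

app = QApplication([])
saved = ["#ff112233", "#ff445566"]  # what set_config would have stored

# Restore side (mirrors setupColourPalette):
for i, name in enumerate(saved[: QColorDialog.customCount()]):
    if QColor.isValidColorName(name):
        QColorDialog.setCustomColor(i, QColor.fromString(name))

# Save side (mirrors save_custom_colours):
colors = [
    QColorDialog.customColor(i).name(QColor.NameFormat.HexArgb)
    for i in range(QColorDialog.customCount())
]
print(colors[:2])  # ['#ff112233', '#ff445566']
```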
@@ -483,7 +483,7 @@ def update_deck_configs() -> bytes:
             update.abort = True

     def on_success(changes: OpChanges) -> None:
-        if isinstance(window := aqt.mw.app.activeWindow(), DeckOptionsDialog):
+        if isinstance(window := aqt.mw.app.activeModalWidget(), DeckOptionsDialog):
             window.reject()

     def handle_on_main() -> None:
@@ -511,7 +511,7 @@ def set_scheduling_states() -> bytes:

 def import_done() -> bytes:
     def update_window_modality() -> None:
-        if window := aqt.mw.app.activeWindow():
+        if window := aqt.mw.app.activeModalWidget():
             from aqt.import_export.import_dialog import ImportDialog

             if isinstance(window, ImportDialog):
@@ -529,7 +529,7 @@ def import_request(endpoint: str) -> bytes:
     response.ParseFromString(output)

     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         on_op_finished(aqt.mw, response, window)

     aqt.mw.taskman.run_on_main(handle_on_main)
@@ -569,7 +569,7 @@ def change_notetype() -> bytes:
     data = request.data

     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         if isinstance(window, ChangeNotetypeDialog):
             window.save(data)

@@ -579,7 +579,7 @@ def change_notetype() -> bytes:

 def deck_options_require_close() -> bytes:
     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         if isinstance(window, DeckOptionsDialog):
             window.require_close()

@@ -591,7 +591,7 @@ def deck_options_require_close() -> bytes:

 def deck_options_ready() -> bytes:
     def handle_on_main() -> None:
-        window = aqt.mw.app.activeWindow()
+        window = aqt.mw.app.activeModalWidget()
         if isinstance(window, DeckOptionsDialog):
             window.set_ready()

@@ -599,6 +599,15 @@ def deck_options_ready() -> bytes:
     return b""


+def save_custom_colours() -> bytes:
+    colors = [
+        QColorDialog.customColor(i).name(QColor.NameFormat.HexArgb)
+        for i in range(QColorDialog.customCount())
+    ]
+    aqt.mw.col.set_config("customColorPickerPalette", colors)
+    return b""
+
+
 post_handler_list = [
     congrats_info,
     get_deck_configs_for_update,
@@ -614,6 +623,7 @@ post_handler_list = [
     search_in_browser,
     deck_options_require_close,
     deck_options_ready,
+    save_custom_colours,
 ]


@@ -654,6 +664,7 @@ exposed_backend_list = [
     "evaluate_params_legacy",
     "get_optimal_retention_parameters",
     "simulate_fsrs_review",
+    "simulate_fsrs_workload",
     # DeckConfigService
     "get_ignored_before_count",
     "get_retention_workload",
@@ -18,7 +18,7 @@ def add_note(
     parent: QWidget,
     note: Note,
     target_deck_id: DeckId,
-) -> CollectionOp[OpChanges]:
+) -> CollectionOp[OpChangesWithCount]:
     return CollectionOp(parent, lambda col: col.add_note(note, target_deck_id))
@@ -147,6 +147,7 @@ def update_and_restart() -> None:

     with contextlib.suppress(ResourceWarning):
         env = os.environ.copy()
+        env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
         # fixes a bug where launcher fails to appear if opening it
         # straight after updating
         if "GNOME_TERMINAL_SCREEN" in env:
@@ -156,12 +157,15 @@ def update_and_restart() -> None:
         creationflags = (
             subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
         )
+        # On Windows 10, changing the handles breaks ANSI display
+        io = None if sys.platform == "win32" else subprocess.DEVNULL

         subprocess.Popen(
             [launcher],
             start_new_session=True,
-            stdin=subprocess.DEVNULL,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
+            stdin=io,
+            stdout=io,
+            stderr=io,
             env=env,
             creationflags=creationflags,
         )
@@ -17,6 +17,7 @@ import aqt.browser
 import aqt.operations
 from anki.cards import Card, CardId
 from anki.collection import Config, OpChanges, OpChangesWithCount
+from anki.lang import with_collapsed_whitespace
 from anki.scheduler.base import ScheduleCardsAsNew
 from anki.scheduler.v3 import (
     CardAnswer,
@@ -966,11 +967,15 @@ timerStopped = false;
         elapsed = self.mw.col.timeboxReached()
         if elapsed:
             assert not isinstance(elapsed, bool)
-            part1 = tr.studying_card_studied_in(count=elapsed[1])
-            mins = int(round(elapsed[0] / 60))
-            part2 = tr.studying_minute(count=mins)
+            cards_val = elapsed[1]
+            minutes_val = int(round(elapsed[0] / 60))
+            message = with_collapsed_whitespace(
+                tr.studying_card_studied_in_minute(
+                    cards=cards_val, minutes=str(minutes_val)
+                )
+            )
             fin = tr.studying_finish()
-            diag = askUserDialog(f"{part1} {part2}", [tr.studying_continue(), fin])
+            diag = askUserDialog(message, [tr.studying_continue(), fin])
             diag.setIcon(QMessageBox.Icon.Information)
             if diag.run() == fin:
                 self.mw.moveToState("deckBrowser")
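The new studying-card-studied-in-minute Fluent message spans multiple lines, so the dialog text is run through with_collapsed_whitespace from anki.lang. A plausible one-line equivalent of that helper (an assumption, not the verbatim source) and its effect:

```python
import re

def with_collapsed_whitespace(s: str) -> str:
    # Collapse any run of whitespace (including newlines) into a single space.
    return re.sub(r"\s+", " ", s)

msg = "3 cards\nstudied in\n1 minute."
print(with_collapsed_whitespace(msg))  # "3 cards studied in 1 minute."
```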
@@ -32,6 +32,7 @@ from aqt._macos_helper import macos_helper
 from aqt.mpv import MPV, MPVBase, MPVCommandError
 from aqt.qt import *
 from aqt.taskman import TaskManager
+from aqt.theme import theme_manager
 from aqt.utils import (
     disable_help_button,
     restoreGeom,
@@ -630,18 +631,44 @@ class QtAudioInputRecorder(Recorder):
         self.mw = mw
         self._parent = parent

-        from PyQt6.QtMultimedia import QAudioFormat, QAudioSource  # type: ignore
+        from PyQt6.QtMultimedia import QAudioSource, QMediaDevices  # type: ignore

-        format = QAudioFormat()
-        format.setChannelCount(2)
-        format.setSampleRate(44100)
-        format.setSampleFormat(QAudioFormat.SampleFormat.Int16)
+        # Get the default audio input device
+        device = QMediaDevices.defaultAudioInput()

-        source = QAudioSource(format, parent)
+        # Try to use Int16 format first (avoids conversion)
+        preferred_format = device.preferredFormat()
+        int16_format = preferred_format
+        int16_format.setSampleFormat(preferred_format.SampleFormat.Int16)
+
+        if device.isFormatSupported(int16_format):
+            # Use Int16 if supported
+            format = int16_format
+        else:
+            # Fall back to device's preferred format
+            format = preferred_format
+
+        # Create the audio source with the chosen format
+        source = QAudioSource(device, format, parent)
+
+        # Store the actual format being used
         self._format = source.format()
         self._audio_input = source

+    def _convert_float_to_int16(self, float_buffer: bytearray) -> bytes:
+        """Convert float32 audio samples to int16 format for WAV output."""
+        import struct
+
+        float_count = len(float_buffer) // 4  # 4 bytes per float32
+        floats = struct.unpack(f"{float_count}f", float_buffer)
+
+        # Convert to int16 range, clipping and scaling in one step
+        int16_samples = [
+            max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats
+        ]
+
+        return struct.pack(f"{len(int16_samples)}h", *int16_samples)
+
     def start(self, on_done: Callable[[], None]) -> None:
         self._iodevice = self._audio_input.start()
         self._buffer = bytearray()
@@ -664,18 +691,32 @@ class QtAudioInputRecorder(Recorder):
             return

         def write_file() -> None:
-            # swallow the first 300ms to allow audio device to quiesce
-            wait = int(44100 * self.STARTUP_DELAY)
-            if len(self._buffer) <= wait:
-                return
-            self._buffer = self._buffer[wait:]
+            from PyQt6.QtMultimedia import QAudioFormat

-            # write out the wave file
+            # swallow the first 300ms to allow audio device to quiesce
+            bytes_per_frame = self._format.bytesPerFrame()
+            frames_to_skip = int(self._format.sampleRate() * self.STARTUP_DELAY)
+            bytes_to_skip = frames_to_skip * bytes_per_frame
+
+            if len(self._buffer) <= bytes_to_skip:
+                return
+            self._buffer = self._buffer[bytes_to_skip:]
+
+            # Check if we need to convert float samples to int16
+            if self._format.sampleFormat() == QAudioFormat.SampleFormat.Float:
+                audio_data = self._convert_float_to_int16(self._buffer)
+                sample_width = 2  # int16 is 2 bytes
+            else:
+                # For integer formats, use the data as-is
+                audio_data = bytes(self._buffer)
+                sample_width = self._format.bytesPerSample()
+
+            # write out the wave file with the correct format parameters
             wf = wave.open(self.output_path, "wb")
             wf.setnchannels(self._format.channelCount())
-            wf.setsampwidth(2)
+            wf.setsampwidth(sample_width)
             wf.setframerate(self._format.sampleRate())
-            wf.writeframes(self._buffer)
+            wf.writeframes(audio_data)
             wf.close()

         def and_then(fut: Future) -> None:
@@ -743,7 +784,8 @@ class RecordDialog(QDialog):
     def _setup_dialog(self) -> None:
         self.setWindowTitle("Anki")
         icon = QLabel()
-        icon.setPixmap(QPixmap("icons:media-record.png"))
+        qicon = theme_manager.icon_from_resources("icons:media-record.svg")
+        icon.setPixmap(qicon.pixmap(60, 60))
         self.label = QLabel("...")
         hbox = QHBoxLayout()
         hbox.addWidget(icon)
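The clip-and-scale logic in _convert_float_to_int16 can be checked outside Qt with a few hand-picked samples, including out-of-range values that must clip:

```python
import struct

samples = [0.0, 0.5, -0.5, 1.5, -1.5]  # 1.5 / -1.5 must clip to full scale
buf = bytearray(struct.pack(f"{len(samples)}f", *samples))

float_count = len(buf) // 4  # 4 bytes per float32
floats = struct.unpack(f"{float_count}f", buf)
int16 = [
    max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats
]
print(int16)  # [0, 16383, -16383, 32767, -32767]
```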
@ -180,7 +180,7 @@ class CustomStyles:
|
||||||
QPushButton {{
|
QPushButton {{
|
||||||
margin: 1px;
|
margin: 1px;
|
||||||
}}
|
}}
|
||||||
QPushButton:focus {{
|
QPushButton:focus, QPushButton:default:hover {{
|
||||||
border: 2px solid {tm.var(colors.BORDER_FOCUS)};
|
border: 2px solid {tm.var(colors.BORDER_FOCUS)};
|
||||||
outline: none;
|
outline: none;
|
||||||
margin: 0px;
|
margin: 0px;
|
||||||
|
@@ -199,9 +199,6 @@ class CustomStyles:
            )
        };
        }}
-       QPushButton:default:hover {{
-           border-width: 2px;
-       }}
        QPushButton:pressed,
        QPushButton:checked,
        QSpinBox::up-button:pressed,
@@ -73,7 +73,7 @@ def handle_sync_error(mw: aqt.main.AnkiQt, err: Exception) -> None:
    elif isinstance(err, Interrupted):
        # no message to show
        return
-    show_warning(str(err))
+    show_warning(str(err), parent=mw)


 def on_normal_sync_timer(mw: aqt.main.AnkiQt) -> None:
@@ -118,7 +118,7 @@ def sync_collection(mw: aqt.main.AnkiQt, on_done: Callable[[], None]) -> None:
        if out.new_endpoint:
            mw.pm.set_current_sync_url(out.new_endpoint)
        if out.server_message:
-            showText(out.server_message)
+            showText(out.server_message, parent=mw)
        if out.required == out.NO_CHANGES:
            tooltip(parent=mw, msg=tr.sync_collection_complete())
            # all done; track media progress
@@ -115,7 +115,7 @@ class ThemeManager:
        # Workaround for Qt bug. First attempt was percent-escaping the chars,
        # but Qt can't handle that.
        # https://forum.qt.io/topic/55274/solved-qss-with-special-characters/11
-        path = re.sub(r"([\u00A1-\u00FF])", r"\\\1", path)
+        path = re.sub(r"(['\u00A1-\u00FF])", r"\\\1", path)
        return path

    def icon_from_resources(self, path: str | ColoredIcon) -> QIcon:
@@ -226,29 +226,45 @@ def ask_user_dialog(
    )


-def show_info(text: str, callback: Callable | None = None, **kwargs: Any) -> MessageBox:
+def show_info(
+    text: str,
+    callback: Callable | None = None,
+    parent: QWidget | None = None,
+    **kwargs: Any,
+) -> MessageBox:
    "Show a small info window with an OK button."
    if "icon" not in kwargs:
        kwargs["icon"] = QMessageBox.Icon.Information
    return MessageBox(
        text,
        callback=(lambda _: callback()) if callback is not None else None,
+        parent=parent,
        **kwargs,
    )


 def show_warning(
-    text: str, callback: Callable | None = None, **kwargs: Any
+    text: str,
+    callback: Callable | None = None,
+    parent: QWidget | None = None,
+    **kwargs: Any,
 ) -> MessageBox:
    "Show a small warning window with an OK button."
-    return show_info(text, icon=QMessageBox.Icon.Warning, callback=callback, **kwargs)
+    return show_info(
+        text, icon=QMessageBox.Icon.Warning, callback=callback, parent=parent, **kwargs
+    )


 def show_critical(
-    text: str, callback: Callable | None = None, **kwargs: Any
+    text: str,
+    callback: Callable | None = None,
+    parent: QWidget | None = None,
+    **kwargs: Any,
 ) -> MessageBox:
    "Show a small critical error window with an OK button."
-    return show_info(text, icon=QMessageBox.Icon.Critical, callback=callback, **kwargs)
+    return show_info(
+        text, icon=QMessageBox.Icon.Critical, callback=callback, parent=parent, **kwargs
+    )


 def showWarning(
@@ -90,17 +90,21 @@ def update_and_restart() -> None:

    with contextlib.suppress(ResourceWarning):
        env = os.environ.copy()
+        env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
        creationflags = 0
        if sys.platform == "win32":
            creationflags = (
                subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
            )
+        # On Windows, changing the handles breaks ANSI display
+        io = None if sys.platform == "win32" else subprocess.DEVNULL
+
        subprocess.Popen(
            [launcher],
            start_new_session=True,
-            stdin=subprocess.DEVNULL,
-            stdout=subprocess.DEVNULL,
-            stderr=subprocess.DEVNULL,
+            stdin=io,
+            stdout=io,
+            stderr=io,
            env=env,
            creationflags=creationflags,
        )
@@ -30,6 +30,12 @@ lipo -create \
     -output "$APP_LAUNCHER/Contents/MacOS/launcher"
 cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/"

+# Build install_name_tool stub
+clang -arch arm64 -o "$OUTPUT_DIR/stub_arm64" stub.c
+clang -arch x86_64 -o "$OUTPUT_DIR/stub_x86_64" stub.c
+lipo -create "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64" -output "$APP_LAUNCHER/Contents/MacOS/install_name_tool"
+rm "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64"
+
 # Copy support files
 ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
 sed "s/ANKI_VERSION/$ANKI_VERSION/g" Info.plist > "$APP_LAUNCHER/Contents/Info.plist"
@@ -40,7 +46,7 @@ cp ../versions.py "$APP_LAUNCHER/Contents/Resources/"

 # Codesign/bundle
 if [ -z "$NODMG" ]; then
-    for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
+    for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/install_name_tool" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
        codesign --force -vvvv -o runtime -s "Developer ID Application:" \
            --entitlements entitlements.python.xml \
            "$i"
qt/launcher/mac/stub.c (new file, 6 lines)
@@ -0,0 +1,6 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+int main(void) {
+  return 0;
+}
@@ -46,10 +46,13 @@ struct State {
     uv_lock_path: std::path::PathBuf,
     sync_complete_marker: std::path::PathBuf,
     launcher_trigger_file: std::path::PathBuf,
+    mirror_path: std::path::PathBuf,
     pyproject_modified_by_user: bool,
     previous_version: Option<String>,
     resources_dir: std::path::PathBuf,
     venv_folder: std::path::PathBuf,
+    /// system Python + PyQt6 library mode
+    system_qt: bool,
 }

 #[derive(Debug, Clone)]
@@ -71,6 +74,7 @@ pub enum MainMenuChoice {
     Version(VersionKind),
     ToggleBetas,
     ToggleCache,
+    DownloadMirror,
     Uninstall,
 }
|
@ -86,9 +90,13 @@ fn main() {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run() -> Result<()> {
|
fn run() -> Result<()> {
|
||||||
let uv_install_root = dirs::data_local_dir()
|
let uv_install_root = if let Ok(custom_root) = std::env::var("ANKI_LAUNCHER_VENV_ROOT") {
|
||||||
|
std::path::PathBuf::from(custom_root)
|
||||||
|
} else {
|
||||||
|
dirs::data_local_dir()
|
||||||
.context("Unable to determine data_dir")?
|
.context("Unable to determine data_dir")?
|
||||||
.join("AnkiProgramFiles");
|
.join("AnkiProgramFiles")
|
||||||
|
};
|
||||||
|
|
||||||
let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?;
|
let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?;
|
||||||
|
|
||||||
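As a usage note: with this hunk applied, setting the new environment variable redirects the whole install root, e.g. launching with `ANKI_LAUNCHER_VENV_ROOT=/tmp/anki-test` (a hypothetical test path) keeps a throwaway venv separate from the default AnkiProgramFiles location; without the variable, behaviour is unchanged.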
|
@ -108,8 +116,11 @@ fn run() -> Result<()> {
|
||||||
uv_lock_path: uv_install_root.join("uv.lock"),
|
uv_lock_path: uv_install_root.join("uv.lock"),
|
||||||
sync_complete_marker: uv_install_root.join(".sync_complete"),
|
sync_complete_marker: uv_install_root.join(".sync_complete"),
|
||||||
launcher_trigger_file: uv_install_root.join(".want-launcher"),
|
launcher_trigger_file: uv_install_root.join(".want-launcher"),
|
||||||
|
mirror_path: uv_install_root.join("mirror"),
|
||||||
pyproject_modified_by_user: false, // calculated later
|
pyproject_modified_by_user: false, // calculated later
|
||||||
previous_version: None,
|
previous_version: None,
|
||||||
|
system_qt: (cfg!(unix) && !cfg!(target_os = "macos"))
|
||||||
|
&& resources_dir.join("system_qt").exists(),
|
||||||
resources_dir,
|
resources_dir,
|
||||||
venv_folder: uv_install_root.join(".venv"),
|
venv_folder: uv_install_root.join(".venv"),
|
||||||
};
|
};
|
||||||
|
@ -155,12 +166,7 @@ fn run() -> Result<()> {
|
||||||
|
|
||||||
check_versions(&mut state);
|
check_versions(&mut state);
|
||||||
|
|
||||||
let first_run = !state.venv_folder.exists();
|
|
||||||
if first_run {
|
|
||||||
handle_version_install_or_update(&state, MainMenuChoice::Latest)?;
|
|
||||||
} else {
|
|
||||||
main_menu_loop(&state)?;
|
main_menu_loop(&state)?;
|
||||||
}
|
|
||||||
|
|
||||||
// Write marker file to indicate we've completed the sync process
|
// Write marker file to indicate we've completed the sync process
|
||||||
write_sync_marker(&state)?;
|
write_sync_marker(&state)?;
|
||||||
|
@ -195,8 +201,8 @@ fn extract_aqt_version(state: &State) -> Option<String> {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let output = Command::new(&state.uv_path)
|
let output = uv_command(state)
|
||||||
.current_dir(&state.uv_install_root)
|
.ok()?
|
||||||
.env("VIRTUAL_ENV", &state.venv_folder)
|
.env("VIRTUAL_ENV", &state.venv_folder)
|
||||||
.args(["pip", "show", "aqt"])
|
.args(["pip", "show", "aqt"])
|
||||||
.output()
|
.output()
|
||||||
|
@ -263,24 +269,11 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
let have_venv = state.venv_folder.exists();
|
|
||||||
if cfg!(target_os = "macos") && !have_developer_tools() && !have_venv {
|
|
||||||
println!("If you see a pop-up about 'install_name_tool', you can cancel it, and ignore the warning below.\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prepare to sync the venv
|
// Prepare to sync the venv
|
||||||
let mut command = Command::new(&state.uv_path);
|
let mut command = uv_command(state)?;
|
||||||
command.current_dir(&state.uv_install_root);
|
|
||||||
|
|
||||||
// remove UV_* environment variables to avoid interference
|
|
||||||
for (key, _) in std::env::vars() {
|
|
||||||
if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
|
|
||||||
command.env_remove(key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
if cfg!(target_os = "macos") {
|
||||||
// remove CONDA_PREFIX/bin from PATH to avoid conda interference
|
// remove CONDA_PREFIX/bin from PATH to avoid conda interference
|
||||||
#[cfg(target_os = "macos")]
|
|
||||||
if let Ok(conda_prefix) = std::env::var("CONDA_PREFIX") {
|
if let Ok(conda_prefix) = std::env::var("CONDA_PREFIX") {
|
||||||
if let Ok(current_path) = std::env::var("PATH") {
|
if let Ok(current_path) = std::env::var("PATH") {
|
||||||
let conda_bin = format!("{conda_prefix}/bin");
|
let conda_bin = format!("{conda_prefix}/bin");
|
||||||
|
@@ -292,6 +285,30 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
                 command.env("PATH", new_path);
             }
         }
+        // put our fake install_name_tool at the top of the path to override
+        // potential conflicts
+        if let Ok(current_path) = std::env::var("PATH") {
+            let exe_dir = std::env::current_exe()
+                .ok()
+                .and_then(|exe| exe.parent().map(|p| p.to_path_buf()));
+            if let Some(exe_dir) = exe_dir {
+                let new_path = format!("{}:{}", exe_dir.display(), current_path);
+                command.env("PATH", new_path);
+            }
+        }
+    }
+
+    // Create venv with system site packages if system Qt is enabled
+    if state.system_qt {
+        let mut venv_command = uv_command(state)?;
+        venv_command.args([
+            "venv",
+            "--no-managed-python",
+            "--system-site-packages",
+            "--no-config",
+        ]);
+        venv_command.ensure_success()?;
+    }

     command
         .env("UV_CACHE_DIR", &state.uv_cache_dir)
@@ -299,13 +316,19 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
         .env(
             "UV_HTTP_TIMEOUT",
             std::env::var("UV_HTTP_TIMEOUT").unwrap_or_else(|_| "180".to_string()),
-        )
-        .args(["sync", "--upgrade", "--managed-python", "--no-config"]);
+        );

-    // Add python version if .python-version file exists
+    command.args(["sync", "--upgrade", "--no-config"]);
+    if !state.system_qt {
+        command.arg("--managed-python");
+    }
+
+    // Add python version if .python-version file exists (but not for system Qt)
     if let Some(version) = &python_version_trimmed {
+        if !state.system_qt {
             command.args(["--python", version]);
         }
+    }

     if state.no_cache_marker.exists() {
         command.env("UV_NO_CACHE", "1");
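Put together, the non-system-Qt path assembles an invocation roughly equivalent to `uv sync --upgrade --no-config --managed-python --python <version>`, run with UV_CACHE_DIR and UV_HTTP_TIMEOUT set and any inherited UV_* variables scrubbed by uv_command (defined further down in this file).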
@@ -379,6 +402,11 @@ fn main_menu_loop(state: &State) -> Result<()> {
                 println!();
                 continue;
             }
+            MainMenuChoice::DownloadMirror => {
+                show_mirror_submenu(state)?;
+                println!();
+                continue;
+            }
             MainMenuChoice::Uninstall => {
                 if handle_uninstall(state)? {
                     std::process::exit(0);
@@ -443,8 +471,13 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
         "6) Cache downloads: {}",
         if cache_enabled { "on" } else { "off" }
     );
+    let mirror_enabled = is_mirror_enabled(state);
+    println!(
+        "7) Download mirror: {}",
+        if mirror_enabled { "on" } else { "off" }
+    );
     println!();
-    println!("7) Uninstall");
+    println!("8) Uninstall");
     print!("> ");
     let _ = stdout().flush();
@@ -483,7 +516,8 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
             }
             "5" => MainMenuChoice::ToggleBetas,
             "6" => MainMenuChoice::ToggleCache,
-            "7" => MainMenuChoice::Uninstall,
+            "7" => MainMenuChoice::DownloadMirror,
+            "8" => MainMenuChoice::Uninstall,
             _ => {
                 println!("Invalid input. Please try again.");
                 continue;
@@ -649,10 +683,9 @@ fn filter_and_normalize_versions(
 fn fetch_versions(state: &State) -> Result<Vec<String>> {
     let versions_script = state.resources_dir.join("versions.py");

-    let mut cmd = Command::new(&state.uv_path);
-    cmd.current_dir(&state.uv_install_root)
-        .args(["run", "--no-project", "--no-config", "--managed-python"])
-        .args(["--with", "pip-system-certs"]);
+    let mut cmd = uv_command(state)?;
+    cmd.args(["run", "--no-project", "--no-config", "--managed-python"])
+        .args(["--with", "pip-system-certs,requests[socks]"]);

     let python_version = read_file(&state.dist_python_version_path)?;
     let python_version_str =
|
@ -716,7 +749,26 @@ fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
|
||||||
&format!("anki-release=={version}\",\n \"anki=={version}\",\n \"aqt=={version}"),
|
&format!("anki-release=={version}\",\n \"anki=={version}\",\n \"aqt=={version}"),
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
write_file(&state.user_pyproject_path, &updated_content)?;
|
|
||||||
|
let final_content = if state.system_qt {
|
||||||
|
format!(
|
||||||
|
concat!(
|
||||||
|
"{}\n\n[tool.uv]\n",
|
||||||
|
"override-dependencies = [\n",
|
||||||
|
" \"pyqt6; sys_platform=='never'\",\n",
|
||||||
|
" \"pyqt6-qt6; sys_platform=='never'\",\n",
|
||||||
|
" \"pyqt6-webengine; sys_platform=='never'\",\n",
|
||||||
|
" \"pyqt6-webengine-qt6; sys_platform=='never'\",\n",
|
||||||
|
" \"pyqt6_sip; sys_platform=='never'\"\n",
|
||||||
|
"]\n"
|
||||||
|
),
|
||||||
|
updated_content
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
updated_content
|
||||||
|
};
|
||||||
|
|
||||||
|
write_file(&state.user_pyproject_path, &final_content)?;
|
||||||
|
|
||||||
// Update .python-version based on version kind
|
// Update .python-version based on version kind
|
||||||
match version_kind {
|
match version_kind {
|
||||||
|
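Rendered out, the block that the format!/concat! call above appends to the user's pyproject (in system Qt mode) looks like this; the `sys_platform=='never'` markers make uv treat the PyQt packages as never-installable, so the system-provided copies are used instead:

[tool.uv]
override-dependencies = [
 "pyqt6; sys_platform=='never'",
 "pyqt6-qt6; sys_platform=='never'",
 "pyqt6-webengine; sys_platform=='never'",
 "pyqt6-webengine-qt6; sys_platform=='never'",
 "pyqt6_sip; sys_platform=='never'"
]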
@@ -750,6 +802,9 @@ fn update_pyproject_for_version(menu_choice: MainMenuChoice, state: &State) -> R
         MainMenuChoice::ToggleCache => {
             unreachable!();
         }
+        MainMenuChoice::DownloadMirror => {
+            unreachable!();
+        }
         MainMenuChoice::Uninstall => {
             unreachable!();
         }
@@ -905,12 +960,28 @@ fn handle_uninstall(state: &State) -> Result<bool> {
     Ok(true)
 }

-fn have_developer_tools() -> bool {
-    Command::new("xcode-select")
-        .args(["-p"])
-        .output()
-        .map(|output| output.status.success())
-        .unwrap_or(false)
+fn uv_command(state: &State) -> Result<Command> {
+    let mut command = Command::new(&state.uv_path);
+    command.current_dir(&state.uv_install_root);
+
+    // remove UV_* environment variables to avoid interference
+    for (key, _) in std::env::vars() {
+        if key.starts_with("UV_") {
+            command.env_remove(key);
+        }
+    }
+    command
+        .env_remove("VIRTUAL_ENV")
+        .env_remove("SSLKEYLOGFILE");
+
+    // Add mirror environment variable if enabled
+    if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
+        command
+            .env("UV_PYTHON_INSTALL_MIRROR", &python_mirror)
+            .env("UV_DEFAULT_INDEX", &pypi_mirror);
+    }
+
+    Ok(command)
 }

 fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
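The idea behind uv_command is to build every uv invocation from a clean environment so stray shell configuration can't redirect caches or the target venv. A minimal Python sketch of the same scrubbing pattern (the helper name is hypothetical):

import os
import subprocess

def run_uv_clean(uv_path: str, *args: str) -> subprocess.CompletedProcess:
    """Drop UV_*, VIRTUAL_ENV and SSLKEYLOGFILE from the inherited
    environment before invoking uv, mirroring the Rust helper above."""
    env = {
        k: v
        for k, v in os.environ.items()
        if not k.startswith("UV_") and k not in ("VIRTUAL_ENV", "SSLKEYLOGFILE")
    }
    return subprocess.run([uv_path, *args], env=env, check=True)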
|
@ -935,10 +1006,75 @@ fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
|
||||||
// Set UV and Python paths for the Python code
|
// Set UV and Python paths for the Python code
|
||||||
cmd.env("ANKI_LAUNCHER_UV", state.uv_path.utf8()?.as_str());
|
cmd.env("ANKI_LAUNCHER_UV", state.uv_path.utf8()?.as_str());
|
||||||
cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str());
|
cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str());
|
||||||
|
cmd.env_remove("SSLKEYLOGFILE");
|
||||||
|
|
||||||
Ok(cmd)
|
Ok(cmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn is_mirror_enabled(state: &State) -> bool {
|
||||||
|
state.mirror_path.exists()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_mirror_urls(state: &State) -> Result<Option<(String, String)>> {
|
||||||
|
if !state.mirror_path.exists() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
let content = read_file(&state.mirror_path)?;
|
||||||
|
let content_str = String::from_utf8(content).context("Invalid UTF-8 in mirror file")?;
|
||||||
|
|
||||||
|
let lines: Vec<&str> = content_str.lines().collect();
|
||||||
|
if lines.len() >= 2 {
|
||||||
|
Ok(Some((
|
||||||
|
lines[0].trim().to_string(),
|
||||||
|
lines[1].trim().to_string(),
|
||||||
|
)))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn show_mirror_submenu(state: &State) -> Result<()> {
|
||||||
|
loop {
|
||||||
|
println!("Download mirror options:");
|
||||||
|
println!("1) No mirror");
|
||||||
|
println!("2) China");
|
||||||
|
print!("> ");
|
||||||
|
let _ = stdout().flush();
|
||||||
|
|
||||||
|
let mut input = String::new();
|
||||||
|
let _ = stdin().read_line(&mut input);
|
||||||
|
let input = input.trim();
|
||||||
|
|
||||||
|
match input {
|
||||||
|
"1" => {
|
||||||
|
// Remove mirror file
|
||||||
|
if state.mirror_path.exists() {
|
||||||
|
let _ = remove_file(&state.mirror_path);
|
||||||
|
}
|
||||||
|
println!("Mirror disabled.");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
"2" => {
|
||||||
|
// Write China mirror URLs
|
||||||
|
let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/";
|
||||||
|
write_file(&state.mirror_path, china_mirrors)?;
|
||||||
|
println!("China mirror enabled.");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
"" => {
|
||||||
|
// Empty input - return to main menu
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
println!("Invalid input. Please try again.");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
|
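The mirror file format introduced above is deliberately trivial: line one is the python-build-standalone mirror, line two the PyPI index; anything else disables the mirror. A minimal Python sketch of the same parse rule (function name is hypothetical):

from pathlib import Path

def read_mirror_file(path: Path) -> tuple[str, str] | None:
    """First line: python-build-standalone mirror; second line: PyPI index.
    A missing file or fewer than two lines means no mirror."""
    if not path.exists():
        return None
    lines = path.read_text(encoding="utf-8").splitlines()
    if len(lines) < 2:
        return None
    return lines[0].strip(), lines[1].strip()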
@@ -62,8 +62,9 @@ pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<
 pub fn relaunch_in_terminal() -> Result<()> {
     let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
     Command::new("open")
-        .args(["-a", "Terminal"])
+        .args(["-na", "Terminal"])
         .arg(current_exe)
+        .env_remove("ANKI_LAUNCHER_WANT_TERMINAL")
         .ensure_spawn()?;
     std::process::exit(0);
 }
@@ -116,8 +116,9 @@ pub use windows::ensure_terminal_shown;
 pub fn ensure_terminal_shown() -> Result<()> {
     use std::io::IsTerminal;

+    let want_terminal = std::env::var("ANKI_LAUNCHER_WANT_TERMINAL").is_ok();
     let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout());
-    if !stdout_is_terminal {
+    if want_terminal || !stdout_is_terminal {
         #[cfg(target_os = "macos")]
         mac::relaunch_in_terminal()?;
         #[cfg(not(target_os = "macos"))]
@@ -3,9 +3,9 @@

 import json
 import sys
-import urllib.request

 import pip_system_certs.wrapt_requests
+import requests

 pip_system_certs.wrapt_requests.inject_truststore()
@@ -15,8 +15,9 @@ def main():
    url = "https://pypi.org/pypi/aqt/json"

    try:
-        with urllib.request.urlopen(url, timeout=30) as response:
-            data = json.loads(response.read().decode("utf-8"))
+        response = requests.get(url, timeout=30)
+        response.raise_for_status()
+        data = response.json()
        releases = data.get("releases", {})

        # Create list of (version, upload_time) tuples
@@ -33,6 +33,12 @@ class _MacOSHelper:
        "On completion, file should be saved if no error has arrived."
        self._dll.end_wav_record()

+    def disable_appnap(self) -> None:
+        self._dll.disable_appnap()
+
+    def enable_appnap(self) -> None:
+        self._dll.enable_appnap()
+

 # this must not be overwritten or deallocated
 @CFUNCTYPE(None, c_char_p)  # type: ignore
qt/mac/appnap.swift (new file, 25 lines)
@@ -0,0 +1,25 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+import Foundation
+
+private var currentActivity: NSObjectProtocol?
+
+@_cdecl("disable_appnap")
+public func disableAppNap() {
+    // No-op if already assigned
+    guard currentActivity == nil else { return }
+
+    currentActivity = ProcessInfo.processInfo.beginActivity(
+        options: .userInitiatedAllowingIdleSystemSleep,
+        reason: "AppNap is disabled"
+    )
+}
+
+@_cdecl("enable_appnap")
+public func enableAppNap() {
+    guard let activity = currentActivity else { return }
+
+    ProcessInfo.processInfo.endActivity(activity)
+    currentActivity = nil
+}
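The `@_cdecl` attributes export the Swift functions with plain C symbol names, which is what lets the Python `_MacOSHelper` wrapper above call them through ctypes. A hedged sketch of that bridge, assuming the dylib name (the real helper resolves the library path inside the installed wheel):

import ctypes

# Assumed library name for illustration only.
dll = ctypes.CDLL("libankihelper.dylib")

dll.disable_appnap()  # matches the @_cdecl("disable_appnap") export above
try:
    pass  # long-running work that macOS must not throttle
finally:
    dll.enable_appnap()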
@@ -15,6 +15,7 @@ echo "Building macOS helper dylib..."
 # Create the wheel using uv
 echo "Creating wheel..."
 cd "$SCRIPT_DIR"
+rm -rf dist
 "$PROJ_ROOT/out/extracted/uv/uv" build --wheel

 echo "Build complete!"
@@ -1,8 +1,6 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-import os
-import platform
 import subprocess
 import sys
 from pathlib import Path
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

 [project]
 name = "anki-mac-helper"
-version = "0.1.0"
+version = "0.1.1"
 description = "Small support library for Anki on Macs"
 requires-python = ">=3.9"
 license = { text = "AGPL-3.0-or-later" }
qt/mac/update-launcher-env (new executable file, 14 lines)
@@ -0,0 +1,14 @@
+#!/bin/bash
+#
+# Build and install into the launcher venv
+
+set -e
+
+./build.sh
+if [[ "$OSTYPE" == "darwin"* ]]; then
+    export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv
+else
+    export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv
+fi
+../../out/extracted/uv/uv pip install dist/*.whl
@@ -12,7 +12,7 @@ dependencies = [
     "send2trash",
     "waitress>=2.0.0",
     "pywin32; sys.platform == 'win32'",
-    "anki-mac-helper; sys.platform == 'darwin'",
+    "anki-mac-helper>=0.1.1; sys.platform == 'darwin'",
     "pip-system-certs!=5.1",
     "pyqt6>=6.2",
     "pyqt6-webengine>=6.2",
@@ -81,6 +81,7 @@ pin-project.workspace = true
 prost.workspace = true
 pulldown-cmark.workspace = true
 rand.workspace = true
+rayon.workspace = true
 regex.workspace = true
 reqwest.workspace = true
 rusqlite.workspace = true
@@ -22,6 +22,7 @@ inflections.workspace = true
 anki_io.workspace = true
 anyhow.workspace = true
 itertools.workspace = true
+regex.workspace = true

 [dependencies]
 fluent.workspace = true
@@ -4,6 +4,5 @@
 // Include auto-generated content

 #![allow(clippy::all)]
-#![allow(text_direction_codepoint_in_literal)]

 include!(concat!(env!("OUT_DIR"), "/strings.rs"));
@@ -195,12 +195,30 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
     .unwrap();

     for (module, contents) in modules {
-        writeln!(buf, r###" "{module}" => r##"{contents}"##,"###).unwrap();
+        let escaped_contents = escape_unicode_control_chars(contents);
+        writeln!(
+            buf,
+            r###" "{module}" => r##"{escaped_contents}"##,"###
+        )
+        .unwrap();
     }

     buf.push_str("};\n");
 }

+fn escape_unicode_control_chars(input: &str) -> String {
+    use regex::Regex;
+
+    static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
+    let re = RE.get_or_init(|| Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap());
+
+    re.replace_all(input, |caps: &regex::Captures| {
+        let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
+        format!("\\u{{{:04x}}}", c as u32)
+    })
+    .into_owned()
+}
+
 fn lang_constant_name(lang: &str) -> String {
     lang.to_ascii_uppercase().replace('-', "_")
 }
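The helper above replaces Unicode bidirectional control characters (U+202A-U+202E and U+2066-U+2069) with visible escapes before the strings are baked into generated Rust source, so they can't silently reorder text inside literals. The same transformation in a few lines of Python, as a sketch:

import re

BIDI_CONTROLS = re.compile(r"[\u202a-\u202e\u2066-\u2069]")

def escape_bidi_controls(text: str) -> str:
    """Replace bidi control characters with \\u{...} escapes, as above."""
    return BIDI_CONTROLS.sub(lambda m: f"\\u{{{ord(m.group(0)):04x}}}", text)

assert escape_bidi_controls("a\u202eb") == "a\\u{202e}b"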
@@ -42,14 +42,14 @@ enum CheckableUrl {
 }

 impl CheckableUrl {
-    fn url(&self) -> Cow<str> {
+    fn url(&self) -> Cow<'_, str> {
         match *self {
             Self::HelpPage(page) => help_page_to_link(page).into(),
             Self::String(s) => s.into(),
         }
     }

-    fn anchor(&self) -> Cow<str> {
+    fn anchor(&self) -> Cow<'_, str> {
         match *self {
             Self::HelpPage(page) => help_page_link_suffix(page).into(),
             Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),
@@ -94,7 +94,7 @@ impl BackendCollectionService for Backend {
 }

 impl Backend {
-    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+    pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_some()
@@ -102,7 +102,7 @@ impl Backend {
             .ok_or(AnkiError::CollectionNotOpen)
     }

-    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
+    pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
         let guard = self.col.lock().unwrap();
         guard
             .is_none()
@@ -105,7 +105,8 @@ impl Card {

     /// Returns true if the card has a due date in terms of days.
     fn is_due_in_days(&self) -> bool {
-        matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
+        self.ctype != CardType::New && self.original_or_current_due() <= 365_000 // keep consistent with SQL
+            || matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
             || (self.ctype == CardType::Review && self.is_undue_queue())
     }
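The 365_000 cutoff relies on Anki's due-value convention: small due numbers are day offsets, while much larger values are second-based timestamps, so the predicate can classify a card without consulting the queue alone. A sketch of that check, with hypothetical field names:

MAX_DAY_DUE = 365_000  # due values above this are unix timestamps in seconds

def is_due_in_days(is_new_card: bool, due: int) -> bool:
    """Non-new cards whose due value is a plausible day number are
    day-scheduled; larger values are intraday timestamps."""
    return not is_new_card and due <= MAX_DAY_DUE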
@@ -125,22 +126,20 @@ impl Card {
         }
     }

-    /// This uses card.due and card.ivl to infer the elapsed time. If 'set due
-    /// date' or an add-on has changed the due date, this won't be accurate.
-    pub(crate) fn days_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
+    /// If last_review_date isn't stored in the card, this uses card.due and
+    /// card.ivl to infer the elapsed time, which won't be accurate if
+    /// 'set due date' or an add-on has changed the due date.
+    pub(crate) fn seconds_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
         if let Some(last_review_time) = self.last_review_time {
-            Some(timing.next_day_at.elapsed_days_since(last_review_time) as u32)
-        } else if !self.is_due_in_days() {
-            Some(
-                (timing.next_day_at.0 as u32).saturating_sub(self.original_or_current_due() as u32)
-                    / 86_400,
-            )
-        } else {
+            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
+        } else if self.is_due_in_days() {
             self.due_time(timing).map(|due| {
                 (due.adding_secs(-86_400 * self.interval as i64)
-                    .elapsed_secs()
-                    / 86_400) as u32
+                    .elapsed_secs()) as u32
             })
+        } else {
+            let last_review_time = TimestampSecs(self.original_or_current_due() as i64);
+            Some(timing.now.elapsed_secs_since(last_review_time) as u32)
         }
     }
 }
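The rewritten method keeps the same three-way fallback as before but returns seconds instead of days. A Python sketch of the decision order, using hypothetical field names purely for illustration:

SECS_PER_DAY = 86_400

def seconds_since_last_review(card, now: int, due_time: int | None) -> int | None:
    """Prefer the stored review timestamp; otherwise infer the last review
    from due - interval (day-scheduled cards); otherwise treat the raw due
    value as a second-based timestamp, mirroring the Rust above."""
    if card.last_review_time is not None:
        return now - card.last_review_time
    if card.is_due_in_days:
        if due_time is None:
            return None
        return now - (due_time - card.interval * SECS_PER_DAY)
    return now - card.original_or_current_due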
|
@ -543,12 +542,12 @@ impl RowContext {
|
||||||
self.cards[0]
|
self.cards[0]
|
||||||
.memory_state
|
.memory_state
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.zip(self.cards[0].days_since_last_review(&self.timing))
|
.zip(self.cards[0].seconds_since_last_review(&self.timing))
|
||||||
.zip(Some(self.cards[0].decay.unwrap_or(FSRS5_DEFAULT_DECAY)))
|
.zip(Some(self.cards[0].decay.unwrap_or(FSRS5_DEFAULT_DECAY)))
|
||||||
.map(|((state, days_elapsed), decay)| {
|
.map(|((state, seconds), decay)| {
|
||||||
let r = FSRS::new(None).unwrap().current_retrievability(
|
let r = FSRS::new(None).unwrap().current_retrievability_seconds(
|
||||||
(*state).into(),
|
(*state).into(),
|
||||||
days_elapsed,
|
seconds,
|
||||||
decay,
|
decay,
|
||||||
);
|
);
|
||||||
format!("{:.0}%", r * 100.)
|
format!("{:.0}%", r * 100.)
|
||||||
|
|
|
@@ -34,7 +34,7 @@ pub fn prettify_av_tags<S: Into<String> + AsRef<str>>(txt: S) -> String {

 /// Parse `txt` into [CardNodes] and return the result,
 /// or [None] if it only contains text nodes.
-fn nodes_or_text_only(txt: &str) -> Option<CardNodes> {
+fn nodes_or_text_only(txt: &str) -> Option<CardNodes<'_>> {
     let nodes = CardNodes::parse(txt);
     (!nodes.text_only).then_some(nodes)
 }
@@ -103,13 +103,13 @@ fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
     move |s| alt((is_not(arr), success(""))).parse(s)
 }

-fn node(s: &str) -> IResult<Node> {
+fn node(s: &str) -> IResult<'_, Node<'_>> {
     alt((sound_node, tag_node, text_node)).parse(s)
 }

 /// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or
 /// video file.
-fn sound_node(s: &str) -> IResult<Node> {
+fn sound_node(s: &str) -> IResult<'_, Node<'_>> {
     map(
         delimited(tag("[sound:"), is_not("]"), tag("]")),
         Node::SoundOrVideo,
@@ -117,7 +117,7 @@ fn sound_node(s: &str) -> IResult<Node> {
     .parse(s)
 }

-fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
+fn take_till_potential_tag_start(s: &str) -> IResult<'_, &str> {
     // first char could be '[', but wasn't part of a node, so skip (eof ends parse)
     let (after, offset) = anychar(s).map(|(s, c)| (s, c.len_utf8()))?;
     Ok(match after.find('[') {
@@ -127,9 +127,9 @@ fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
 }

 /// An Anki tag `[anki:tag...]...[/anki:tag]`.
-fn tag_node(s: &str) -> IResult<Node> {
+fn tag_node(s: &str) -> IResult<'_, Node<'_>> {
     /// Match the start of an opening tag and return its name.
-    fn name(s: &str) -> IResult<&str> {
+    fn name(s: &str) -> IResult<'_, &str> {
         preceded(tag("[anki:"), is_not("] \t\r\n")).parse(s)
     }
@@ -139,12 +139,12 @@ fn tag_node(s: &str) -> IResult<Node> {
     ) -> impl FnMut(&'s str) -> IResult<'s, Vec<(&'s str, &'s str)>> + 'name {
         /// List of whitespace-separated `key=val` tuples, where `val` may be
         /// empty.
-        fn options(s: &str) -> IResult<Vec<(&str, &str)>> {
-            fn key(s: &str) -> IResult<&str> {
+        fn options(s: &str) -> IResult<'_, Vec<(&str, &str)>> {
+            fn key(s: &str) -> IResult<'_, &str> {
                 is_not("] \t\r\n=").parse(s)
             }

-            fn val(s: &str) -> IResult<&str> {
+            fn val(s: &str) -> IResult<'_, &str> {
                 alt((
                     delimited(tag("\""), is_not0("\""), tag("\"")),
                     is_not0("] \t\r\n\""),
@@ -197,7 +197,7 @@ fn tag_node(s: &str) -> IResult<Node> {
     .parse(s)
 }

-fn text_node(s: &str) -> IResult<Node> {
+fn text_node(s: &str) -> IResult<'_, Node<'_>> {
     map(take_till_potential_tag_start, Node::Text).parse(s)
 }
@@ -54,8 +54,8 @@ enum Token<'a> {
 }

 /// Tokenize string
-fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
-    fn open_cloze(text: &str) -> IResult<&str, Token> {
+fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
+    fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
         // opening brackets and 'c'
         let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
         // following number
@@ -75,12 +75,12 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
         Ok((text, Token::OpenCloze(digits)))
     }

-    fn close_cloze(text: &str) -> IResult<&str, Token> {
+    fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
         map(tag("}}"), |_| Token::CloseCloze).parse(text)
     }

     /// Match a run of text until an open/close marker is encountered.
-    fn normal_text(text: &str) -> IResult<&str, Token> {
+    fn normal_text(text: &str) -> IResult<&str, Token<'_>> {
         if text.is_empty() {
             return Err(nom::Err::Error(nom::error::make_error(
                 text,
@@ -132,7 +132,7 @@ impl ExtractedCloze<'_> {
         self.hint.unwrap_or("...")
     }

-    fn clozed_text(&self) -> Cow<str> {
+    fn clozed_text(&self) -> Cow<'_, str> {
         // happy efficient path?
         if self.nodes.len() == 1 {
             if let TextOrCloze::Text(text) = self.nodes.last().unwrap() {
@@ -353,7 +353,7 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
         .collect()
 }

-pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
+pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
     let mut buf = String::new();
     let mut active_cloze_found_in_text = false;
     for node in &parse_text_with_clozes(text) {
@@ -376,7 +376,7 @@ pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str>
     }
 }

-pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
+pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
     let mut output = Vec::new();
     for node in &parse_text_with_clozes(text) {
         reveal_cloze_text_in_nodes(node, cloze_ord, question, &mut output);
@@ -384,7 +384,7 @@ pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow
     output.join(", ").into()
 }

-pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<str> {
+pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<'_, str> {
     let mut output = Vec::new();
     for node in &parse_text_with_clozes(text) {
         reveal_cloze_text_in_nodes(node, cloze_ord, false, &mut output);
@@ -460,7 +460,7 @@ pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> {
     CLOZE.replace_all(text, "$1")
 }

-fn strip_html_inside_mathjax(text: &str) -> Cow<str> {
+fn strip_html_inside_mathjax(text: &str) -> Cow<'_, str> {
     MATHJAX.replace_all(text, |caps: &Captures| -> String {
         format!(
             "{}{}{}",
@@ -24,6 +24,7 @@ use crate::notetype::NotetypeId;
 use crate::notetype::NotetypeKind;
 use crate::prelude::*;
 use crate::progress::ThrottlingProgressHandler;
+use crate::storage::card::CardFixStats;
 use crate::timestamp::TimestampMillis;
 use crate::timestamp::TimestampSecs;
|
@ -40,6 +41,7 @@ pub struct CheckDatabaseOutput {
|
||||||
notetypes_recovered: usize,
|
notetypes_recovered: usize,
|
||||||
invalid_utf8: usize,
|
invalid_utf8: usize,
|
||||||
invalid_ids: usize,
|
invalid_ids: usize,
|
||||||
|
card_last_review_time_empty: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, Default)]
|
#[derive(Debug, Clone, Copy, Default)]
|
||||||
|
@ -69,6 +71,11 @@ impl CheckDatabaseOutput {
|
||||||
if self.card_properties_invalid > 0 {
|
if self.card_properties_invalid > 0 {
|
||||||
probs.push(tr.database_check_card_properties(self.card_properties_invalid));
|
probs.push(tr.database_check_card_properties(self.card_properties_invalid));
|
||||||
}
|
}
|
||||||
|
if self.card_last_review_time_empty > 0 {
|
||||||
|
probs.push(
|
||||||
|
tr.database_check_card_last_review_time_empty(self.card_last_review_time_empty),
|
||||||
|
);
|
||||||
|
}
|
||||||
if self.cards_missing_note > 0 {
|
if self.cards_missing_note > 0 {
|
||||||
probs.push(tr.database_check_card_missing_note(self.cards_missing_note));
|
probs.push(tr.database_check_card_missing_note(self.cards_missing_note));
|
||||||
}
|
}
|
||||||
|
@ -158,14 +165,25 @@ impl Collection {
|
||||||
|
|
||||||
fn check_card_properties(&mut self, out: &mut CheckDatabaseOutput) -> Result<()> {
|
fn check_card_properties(&mut self, out: &mut CheckDatabaseOutput) -> Result<()> {
|
||||||
let timing = self.timing_today()?;
|
let timing = self.timing_today()?;
|
||||||
let (new_cnt, other_cnt) = self.storage.fix_card_properties(
|
let CardFixStats {
|
||||||
|
new_cards_fixed,
|
||||||
|
other_cards_fixed,
|
||||||
|
last_review_time_fixed,
|
||||||
|
} = self.storage.fix_card_properties(
|
||||||
timing.days_elapsed,
|
timing.days_elapsed,
|
||||||
TimestampSecs::now(),
|
TimestampSecs::now(),
|
||||||
self.usn()?,
|
self.usn()?,
|
||||||
self.scheduler_version() == SchedulerVersion::V1,
|
self.scheduler_version() == SchedulerVersion::V1,
|
||||||
)?;
|
)?;
|
||||||
out.card_position_too_high = new_cnt;
|
out.card_position_too_high = new_cards_fixed;
|
||||||
out.card_properties_invalid += other_cnt;
|
out.card_properties_invalid += other_cards_fixed;
|
||||||
|
out.card_last_review_time_empty = last_review_time_fixed;
|
||||||
|
|
||||||
|
// Trigger one-way sync if last_review_time was updated to avoid conflicts
|
||||||
|
if last_review_time_fixed > 0 {
|
||||||
|
self.set_schema_modified()?;
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -1,6 +1,10 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+use std::collections::HashMap;
+
 use anki_proto::generic;
+use rayon::iter::IntoParallelIterator;
+use rayon::iter::ParallelIterator;

 use crate::collection::Collection;
 use crate::deckconfig::DeckConfSchema11;
@@ -9,6 +13,7 @@ use crate::deckconfig::DeckConfigId;
 use crate::deckconfig::UpdateDeckConfigsRequest;
 use crate::error::Result;
 use crate::scheduler::fsrs::params::ignore_revlogs_before_date_to_ms;
+use crate::scheduler::fsrs::simulator::is_included_card;

 impl crate::services::DeckConfigService for Collection {
     fn add_or_update_deck_config_legacy(
|
@ -101,68 +106,43 @@ impl crate::services::DeckConfigService for Collection {
|
||||||
&mut self,
|
&mut self,
|
||||||
input: anki_proto::deck_config::GetRetentionWorkloadRequest,
|
input: anki_proto::deck_config::GetRetentionWorkloadRequest,
|
||||||
) -> Result<anki_proto::deck_config::GetRetentionWorkloadResponse> {
|
) -> Result<anki_proto::deck_config::GetRetentionWorkloadResponse> {
|
||||||
const LEARN_SPAN: usize = 100_000_000;
|
let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
|
||||||
const TERMINATION_PROB: f32 = 0.001;
|
|
||||||
// the default values are from https://github.com/open-spaced-repetition/Anki-button-usage/blob/881009015c2a85ac911021d76d0aacb124849937/analysis.ipynb
|
|
||||||
const DEFAULT_LEARN_COST: f32 = 19.4698;
|
|
||||||
const DEFAULT_PASS_COST: f32 = 7.8454;
|
|
||||||
const DEFAULT_FAIL_COST: f32 = 23.185;
|
|
||||||
const DEFAULT_INITIAL_PASS_RATE: f32 = 0.7645;
|
|
||||||
|
|
||||||
let guard =
|
let guard =
|
||||||
self.search_cards_into_table(&input.search, crate::search::SortMode::NoOrder)?;
|
self.search_cards_into_table(&input.search, crate::search::SortMode::NoOrder)?;
|
||||||
let costs = guard.col.storage.get_costs_for_retention()?;
|
|
||||||
|
|
||||||
fn smoothing(obs: f32, default: f32, count: u32) -> f32 {
|
let revlogs = guard
|
||||||
let alpha = count as f32 / (50.0 + count as f32);
|
.col
|
||||||
obs * alpha + default * (1.0 - alpha)
|
.storage
|
||||||
}
|
.get_revlog_entries_for_searched_cards_in_card_order()?;
|
||||||
|
|
||||||
let cost_success = smoothing(
|
let mut config = guard.col.get_optimal_retention_parameters(revlogs)?;
|
||||||
costs.average_pass_time_ms / 1000.0,
|
let cards = guard
|
||||||
DEFAULT_PASS_COST,
|
.col
|
||||||
costs.pass_count,
|
.storage
|
||||||
);
|
.all_searched_cards()?
|
||||||
let cost_failure = smoothing(
|
.into_iter()
|
||||||
costs.average_fail_time_ms / 1000.0,
|
.filter(is_included_card)
|
||||||
DEFAULT_FAIL_COST,
|
.filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
|
||||||
costs.fail_count,
|
.collect::<Vec<fsrs::Card>>();
|
||||||
);
|
|
||||||
let cost_learn = smoothing(
|
|
||||||
costs.average_learn_time_ms / 1000.0,
|
|
||||||
DEFAULT_LEARN_COST,
|
|
||||||
costs.learn_count,
|
|
||||||
);
|
|
||||||
let initial_pass_rate = smoothing(
|
|
||||||
costs.initial_pass_rate,
|
|
||||||
DEFAULT_INITIAL_PASS_RATE,
|
|
||||||
costs.pass_count,
|
|
||||||
);
|
|
||||||
|
|
||||||
let before = fsrs::expected_workload(
|
config.deck_size = guard.cards;
|
||||||
|
|
||||||
|
let costs = (70u32..=99u32)
|
||||||
|
.into_par_iter()
|
||||||
|
.map(|dr| {
|
||||||
|
Ok((
|
||||||
|
dr,
|
||||||
|
fsrs::expected_workload_with_existing_cards(
|
||||||
&input.w,
|
&input.w,
|
||||||
input.before,
|
dr as f32 / 100.,
|
||||||
LEARN_SPAN,
|
&config,
|
||||||
cost_success,
|
&cards,
|
||||||
cost_failure,
|
)?,
|
||||||
cost_learn,
|
))
|
||||||
initial_pass_rate,
|
|
||||||
TERMINATION_PROB,
|
|
||||||
)?;
|
|
||||||
let after = fsrs::expected_workload(
|
|
||||||
&input.w,
|
|
||||||
input.after,
|
|
||||||
LEARN_SPAN,
|
|
||||||
cost_success,
|
|
||||||
cost_failure,
|
|
||||||
cost_learn,
|
|
||||||
initial_pass_rate,
|
|
||||||
TERMINATION_PROB,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(anki_proto::deck_config::GetRetentionWorkloadResponse {
|
|
||||||
factor: after / before,
|
|
||||||
})
|
})
|
||||||
|
.collect::<Result<HashMap<_, _>>>()?;
|
||||||
|
|
||||||
|
Ok(anki_proto::deck_config::GetRetentionWorkloadResponse { costs })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
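Instead of returning a single before/after factor, the service now evaluates the expected workload once for each candidate desired retention from 70% to 99% and returns the whole map, parallelised with rayon. A hedged Python sketch of that shape, where `simulate` stands in for fsrs::expected_workload_with_existing_cards:

from concurrent.futures import ProcessPoolExecutor

def workload_by_retention(simulate, lo: int = 70, hi: int = 99) -> dict[int, float]:
    """Evaluate the cost model once per desired retention, in parallel.
    `simulate` takes a retention fraction and returns a workload estimate."""
    drs = range(lo, hi + 1)
    with ProcessPoolExecutor() as pool:
        costs = pool.map(simulate, (dr / 100 for dr in drs))
    return dict(zip(drs, costs))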
@@ -212,10 +212,10 @@ impl Collection {
         if fsrs_toggled {
             self.set_config_bool_inner(BoolKey::Fsrs, req.fsrs)?;
         }
+        let mut deck_desired_retention: HashMap<DeckId, f32> = Default::default();
         for deck in self.storage.get_all_decks()? {
             if let Ok(normal) = deck.normal() {
                 let deck_id = deck.id;

                 // previous order & params
                 let previous_config_id = DeckConfigId(normal.config_id);
                 let previous_config = configs_before_update.get(&previous_config_id);
|
@ -223,21 +223,23 @@ impl Collection {
|
||||||
.map(|c| c.inner.new_card_insert_order())
|
.map(|c| c.inner.new_card_insert_order())
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
let previous_params = previous_config.map(|c| c.fsrs_params());
|
let previous_params = previous_config.map(|c| c.fsrs_params());
|
||||||
let previous_retention = previous_config.map(|c| c.inner.desired_retention);
|
let previous_preset_dr = previous_config.map(|c| c.inner.desired_retention);
|
||||||
|
let previous_deck_dr = normal.desired_retention;
|
||||||
|
let previous_dr = previous_deck_dr.or(previous_preset_dr);
|
||||||
let previous_easy_days = previous_config.map(|c| &c.inner.easy_days_percentages);
|
let previous_easy_days = previous_config.map(|c| &c.inner.easy_days_percentages);
|
||||||
|
|
||||||
// if a selected (sub)deck, or its old config was removed, update deck to point
|
// if a selected (sub)deck, or its old config was removed, update deck to point
|
||||||
// to new config
|
// to new config
|
||||||
let current_config_id = if selected_deck_ids.contains(&deck.id)
|
let (current_config_id, current_deck_dr) = if selected_deck_ids.contains(&deck.id)
|
||||||
|| !configs_after_update.contains_key(&previous_config_id)
|
|| !configs_after_update.contains_key(&previous_config_id)
|
||||||
{
|
{
|
||||||
let mut updated = deck.clone();
|
let mut updated = deck.clone();
|
||||||
updated.normal_mut()?.config_id = selected_config.id.0;
|
updated.normal_mut()?.config_id = selected_config.id.0;
|
||||||
update_deck_limits(updated.normal_mut()?, &req.limits, today);
|
update_deck_limits(updated.normal_mut()?, &req.limits, today);
|
||||||
self.update_deck_inner(&mut updated, deck, usn)?;
|
self.update_deck_inner(&mut updated, deck, usn)?;
|
||||||
selected_config.id
|
(selected_config.id, updated.normal()?.desired_retention)
|
||||||
} else {
|
} else {
|
||||||
previous_config_id
|
(previous_config_id, previous_deck_dr)
|
||||||
};
|
};
|
||||||
|
|
||||||
// if new order differs, deck needs re-sorting
|
// if new order differs, deck needs re-sorting
|
||||||
|
@ -251,11 +253,12 @@ impl Collection {
|
||||||
|
|
||||||
// if params differ, memory state needs to be recomputed
|
// if params differ, memory state needs to be recomputed
|
||||||
let current_params = current_config.map(|c| c.fsrs_params());
|
let current_params = current_config.map(|c| c.fsrs_params());
|
||||||
let current_retention = current_config.map(|c| c.inner.desired_retention);
|
let current_preset_dr = current_config.map(|c| c.inner.desired_retention);
|
||||||
|
let current_dr = current_deck_dr.or(current_preset_dr);
|
||||||
let current_easy_days = current_config.map(|c| &c.inner.easy_days_percentages);
|
let current_easy_days = current_config.map(|c| &c.inner.easy_days_percentages);
|
||||||
if fsrs_toggled
|
if fsrs_toggled
|
||||||
|| previous_params != current_params
|
|| previous_params != current_params
|
||||||
|| previous_retention != current_retention
|
|| previous_dr != current_dr
|
||||||
|| (req.fsrs_reschedule && previous_easy_days != current_easy_days)
|
|| (req.fsrs_reschedule && previous_easy_days != current_easy_days)
|
||||||
{
|
{
|
||||||
decks_needing_memory_recompute
|
decks_needing_memory_recompute
|
||||||
|
@ -263,7 +266,9 @@ impl Collection {
|
||||||
.or_default()
|
.or_default()
|
||||||
.push(deck_id);
|
.push(deck_id);
|
||||||
}
|
}
|
||||||
|
if let Some(desired_retention) = current_deck_dr {
|
||||||
|
deck_desired_retention.insert(deck_id, desired_retention);
|
||||||
|
}
|
||||||
self.adjust_remaining_steps_in_deck(deck_id, previous_config, current_config, usn)?;
|
self.adjust_remaining_steps_in_deck(deck_id, previous_config, current_config, usn)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -277,10 +282,11 @@ impl Collection {
|
||||||
if req.fsrs {
|
if req.fsrs {
|
||||||
Some(UpdateMemoryStateRequest {
|
Some(UpdateMemoryStateRequest {
|
||||||
params: c.fsrs_params().clone(),
|
params: c.fsrs_params().clone(),
|
||||||
desired_retention: c.inner.desired_retention,
|
preset_desired_retention: c.inner.desired_retention,
|
||||||
max_interval: c.inner.maximum_review_interval,
|
max_interval: c.inner.maximum_review_interval,
|
||||||
reschedule: req.fsrs_reschedule,
|
reschedule: req.fsrs_reschedule,
|
||||||
historical_retention: c.inner.historical_retention,
|
historical_retention: c.inner.historical_retention,
|
||||||
|
deck_desired_retention: deck_desired_retention.clone(),
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
@ -409,6 +415,7 @@ fn normal_deck_to_limits(deck: &NormalDeck, today: u32) -> Limits {
|
||||||
.new_limit_today
|
.new_limit_today
|
||||||
.map(|limit| limit.today == today)
|
.map(|limit| limit.today == today)
|
||||||
.unwrap_or_default(),
|
.unwrap_or_default(),
|
||||||
|
desired_retention: deck.desired_retention,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -417,6 +424,7 @@ fn update_deck_limits(deck: &mut NormalDeck, limits: &Limits, today: u32) {
|
||||||
deck.new_limit = limits.new;
|
deck.new_limit = limits.new;
|
||||||
update_day_limit(&mut deck.review_limit_today, limits.review_today, today);
|
update_day_limit(&mut deck.review_limit_today, limits.review_today, today);
|
||||||
update_day_limit(&mut deck.new_limit_today, limits.new_today, today);
|
update_day_limit(&mut deck.new_limit_today, limits.new_today, today);
|
||||||
|
deck.desired_retention = limits.desired_retention;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn update_day_limit(day_limit: &mut Option<DayLimit>, new_limit: Option<u32>, today: u32) {
|
fn update_day_limit(day_limit: &mut Option<DayLimit>, new_limit: Option<u32>, today: u32) {
|
||||||
|
|
|
@ -31,6 +31,7 @@ pub(crate) use name::immediate_parent_name;
|
||||||
pub use name::NativeDeckName;
|
pub use name::NativeDeckName;
|
||||||
pub use schema11::DeckSchema11;
|
pub use schema11::DeckSchema11;
|
||||||
|
|
||||||
|
use crate::deckconfig::DeckConfig;
|
||||||
use crate::define_newtype;
|
use crate::define_newtype;
|
||||||
use crate::error::FilteredDeckError;
|
use crate::error::FilteredDeckError;
|
||||||
use crate::markdown::render_markdown;
|
use crate::markdown::render_markdown;
|
||||||
|
@ -89,6 +90,16 @@ impl Deck {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the effective desired retention value for a deck.
|
||||||
|
/// Returns deck-specific desired retention if available, otherwise falls
|
||||||
|
/// back to config default.
|
||||||
|
pub fn effective_desired_retention(&self, config: &DeckConfig) -> f32 {
|
||||||
|
self.normal()
|
||||||
|
.ok()
|
||||||
|
.and_then(|d| d.desired_retention)
|
||||||
|
.unwrap_or(config.inner.desired_retention)
|
||||||
|
}
|
||||||
|
|
||||||
// used by tests at the moment
|
// used by tests at the moment
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
|
|
|
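A minimal sketch of the fallback rule `effective_desired_retention` encodes: a deck-level override wins, otherwise the preset (config) value applies. Plain `Option`/`f32` values stand in for the real `Deck`/`DeckConfig` types:

```rust
fn effective_dr(deck_dr: Option<f32>, preset_dr: f32) -> f32 {
    deck_dr.unwrap_or(preset_dr)
}

fn main() {
    assert_eq!(effective_dr(Some(0.85), 0.9), 0.85); // deck override wins
    assert_eq!(effective_dr(None, 0.9), 0.9); // falls back to the preset
}
```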
@@ -191,7 +191,7 @@ fn invalid_char_for_deck_component(c: char) -> bool {
         c.is_ascii_control()
 }
 
-fn normalized_deck_name_component(comp: &str) -> Cow<str> {
+fn normalized_deck_name_component(comp: &str) -> Cow<'_, str> {
     let mut out = normalize_to_nfc(comp);
     if out.contains(invalid_char_for_deck_component) {
         out = out.replace(invalid_char_for_deck_component, "").into();
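This hunk, and many of the ones that follow, make the same mechanical change: spelling out the elided lifetime in `Cow` return types, which satisfies Rust's `elided_lifetimes_in_paths` lint without changing behavior. A standalone illustration (function and strings are made up for the example):

```rust
use std::borrow::Cow;

// Before: `fn shout(s: &str) -> Cow<str>` left the borrow implicit.
// After: `Cow<'_, str>` makes it visible that the result may borrow from `s`.
// The two signatures are identical to the compiler; only the lint is silenced.
fn shout(s: &str) -> Cow<'_, str> {
    if s.ends_with('!') {
        Cow::Borrowed(s)
    } else {
        Cow::Owned(format!("{s}!"))
    }
}

fn main() {
    assert_eq!(shout("hi"), "hi!");
    assert_eq!(shout("stop!"), "stop!");
}
```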
@@ -135,6 +135,8 @@ pub struct NormalDeckSchema11 {
     review_limit_today: Option<DayLimit>,
     #[serde(default, deserialize_with = "default_on_invalid")]
     new_limit_today: Option<DayLimit>,
+    #[serde(default, deserialize_with = "default_on_invalid")]
+    desired_retention: Option<u32>,
 }
 
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
@@ -249,6 +251,7 @@ impl Default for NormalDeckSchema11 {
             new_limit: None,
             review_limit_today: None,
             new_limit_today: None,
+            desired_retention: None,
         }
     }
 }
@@ -325,6 +328,7 @@ impl From<NormalDeckSchema11> for NormalDeck {
             new_limit: deck.new_limit,
             review_limit_today: deck.review_limit_today,
             new_limit_today: deck.new_limit_today,
+            desired_retention: deck.desired_retention.map(|v| v as f32 / 100.0),
         }
     }
 }
@@ -366,6 +370,7 @@ impl From<Deck> for DeckSchema11 {
                 new_limit: norm.new_limit,
                 review_limit_today: norm.review_limit_today,
                 new_limit_today: norm.new_limit_today,
+                desired_retention: norm.desired_retention.map(|v| (v * 100.0) as u32),
                 common: deck.into(),
             }),
             DeckKind::Filtered(ref filt) => DeckSchema11::Filtered(FilteredDeckSchema11 {
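A sketch of the schema11 round trip used in the two hunks above, mirroring the `.map(|v| v as f32 / 100.0)` and `(v * 100.0) as u32` pair from the diff: the legacy JSON schema stores desired retention as a whole-number percentage, while the internal type uses a 0.0-1.0 float. The `as u32` cast truncates, so only whole percents should be expected to survive the trip:

```rust
fn to_internal(v: Option<u32>) -> Option<f32> {
    v.map(|v| v as f32 / 100.0)
}

fn to_schema11(v: Option<f32>) -> Option<u32> {
    v.map(|v| (v * 100.0) as u32)
}

fn main() {
    assert_eq!(to_schema11(to_internal(Some(90))), Some(90));
    assert_eq!(to_internal(None), None); // absent stays absent
}
```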
@@ -430,7 +435,8 @@ static RESERVED_DECK_KEYS: Set<&'static str> = phf_set! {
     "browserCollapsed",
     "extendRev",
     "id",
-    "collapsed"
+    "collapsed",
+    "desiredRetention",
 };
 
 impl From<&Deck> for DeckTodaySchema11 {
@@ -231,7 +231,10 @@ fn svg_getter(notetypes: &[Notetype]) -> impl Fn(NotetypeId) -> bool {
 }
 
 impl Collection {
-    fn gather_notes(&mut self, search: impl TryIntoSearch) -> Result<(Vec<Note>, NoteTableGuard)> {
+    fn gather_notes(
+        &mut self,
+        search: impl TryIntoSearch,
+    ) -> Result<(Vec<Note>, NoteTableGuard<'_>)> {
         let guard = self.search_notes_into_table(search)?;
         guard
             .col
@@ -240,7 +243,7 @@ impl Collection {
             .map(|notes| (notes, guard))
     }
 
-    fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard)> {
+    fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard<'_>)> {
         let guard = self.search_cards_of_notes_into_table()?;
         guard
             .col
@@ -664,7 +664,7 @@ mod test {
         self
     }
 
-    fn import(self, col: &mut Collection) -> NoteContext {
+    fn import(self, col: &mut Collection) -> NoteContext<'_> {
         let mut progress_handler = col.new_progress_handler();
         let media_map = Box::leak(Box::new(self.media_map));
         let mut ctx = NoteContext::new(
@@ -154,7 +154,7 @@ pub(super) fn extract_media_entries(
     }
 }
 
-pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<str>> {
+pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<'_, str>> {
     if !filename_is_safe(name) {
         Err(AnkiError::ImportError {
             source: ImportError::Corrupt,
@@ -147,7 +147,7 @@ fn rendered_nodes_to_str(nodes: &[RenderedNode]) -> String {
         .join("")
 }
 
-fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
+fn field_to_record_field(field: &str, with_html: bool) -> Cow<'_, str> {
     let mut text = strip_redundant_sections(field);
     if !with_html {
         text = text.map_cow(|t| html_to_text_line(t, false));
@@ -155,7 +155,7 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
     text
 }
 
-fn strip_redundant_sections(text: &str) -> Cow<str> {
+fn strip_redundant_sections(text: &str) -> Cow<'_, str> {
     static RE: LazyLock<Regex> = LazyLock::new(|| {
         Regex::new(
             r"(?isx)
@@ -169,7 +169,7 @@ fn strip_redundant_sections(text: &str) -> Cow<str> {
     RE.replace_all(text.as_ref(), "")
 }
 
-fn strip_answer_side_question(text: &str) -> Cow<str> {
+fn strip_answer_side_question(text: &str) -> Cow<'_, str> {
     static RE: LazyLock<Regex> =
         LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
     RE.replace_all(text.as_ref(), "")
@@ -251,7 +251,7 @@ impl NoteContext {
             .chain(self.tags(note))
     }
 
-    fn notetype_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn notetype_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_notetype.then(|| {
             self.notetypes
                 .get(&note.notetype_id)
@@ -259,7 +259,7 @@ impl NoteContext {
         })
     }
 
-    fn deck_name(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn deck_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_deck.then(|| {
             self.deck_ids
                 .get(&note.id)
@@ -268,7 +268,7 @@
         })
     }
 
-    fn tags(&self, note: &Note) -> Option<Cow<[u8]>> {
+    fn tags(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
         self.with_tags
             .then(|| Cow::from(note.tags.join(" ").into_bytes()))
     }
@@ -511,7 +511,7 @@ impl NoteContext<'_> {
 }
 
 impl Note {
-    fn first_field_stripped(&self) -> Cow<str> {
+    fn first_field_stripped(&self) -> Cow<'_, str> {
         strip_html_preserving_media_filenames(&self.fields()[0])
     }
 }
@@ -623,7 +623,7 @@ impl ForeignNote {
             .all(|(opt, field)| opt.as_ref().map(|s| s == field).unwrap_or(true))
     }
 
-    fn first_field_stripped(&self) -> Option<Cow<str>> {
+    fn first_field_stripped(&self) -> Option<Cow<'_, str>> {
         self.fields
             .first()
             .and_then(|s| s.as_ref())
@@ -48,7 +48,7 @@ pub struct ExtractedLatex {
 pub(crate) fn extract_latex_expanding_clozes(
     text: &str,
     svg: bool,
-) -> (Cow<str>, Vec<ExtractedLatex>) {
+) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
     if text.contains("{{c") {
         let expanded = expand_clozes_to_reveal_latex(text);
         let (text, extracts) = extract_latex(&expanded, svg);
@@ -60,7 +60,7 @@ pub(crate) fn extract_latex_expanding_clozes(
 
 /// Extract LaTeX from the provided text.
 /// Expects cloze deletions to already be expanded.
-pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
+pub fn extract_latex(text: &str, svg: bool) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
     let mut extracted = vec![];
 
     let new_text = LATEX.replace_all(text, |caps: &Captures| {
@@ -84,7 +84,7 @@ pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
     (new_text, extracted)
 }
 
-fn strip_html_for_latex(html: &str) -> Cow<str> {
+fn strip_html_for_latex(html: &str) -> Cow<'_, str> {
     let mut out: Cow<str> = html.into();
     if let Cow::Owned(o) = LATEX_NEWLINES.replace_all(html, "\n") {
         out = o.into();
@@ -91,7 +91,7 @@ fn nonbreaking_space(char: char) -> bool {
 /// - Any problem characters are removed.
 /// - Windows device names like CON and PRN have '_' appended
 /// - The filename is limited to 120 bytes.
-pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
+pub(crate) fn normalize_filename(fname: &str) -> Cow<'_, str> {
     let mut output = Cow::Borrowed(fname);
 
     if !is_nfc(output.as_ref()) {
@@ -102,7 +102,7 @@ pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
 }
 
 /// See normalize_filename(). This function expects NFC-normalized input.
-pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
+pub(crate) fn normalize_nfc_filename(mut fname: Cow<'_, str>) -> Cow<'_, str> {
     if fname.contains(disallowed_char) {
         fname = fname.replace(disallowed_char, "").into()
     }
@@ -137,7 +137,7 @@ pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
 /// but can be accessed as NFC. On these devices, if the filename
 /// is otherwise valid, the filename is returned as NFC.
 #[allow(clippy::collapsible_else_if)]
-pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<str>> {
+pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<'_, str>> {
     if cfg!(target_vendor = "apple") {
         if !is_nfc(fname) {
             let as_nfc = fname.chars().nfc().collect::<String>();
@@ -208,7 +208,7 @@ pub(crate) fn add_hash_suffix_to_file_stem(fname: &str, hash: &Sha1Hash) -> String {
 }
 
 /// If filename is longer than max_bytes, truncate it.
-fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<str> {
+fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<'_, str> {
     if fname.len() <= max_bytes {
         return Cow::Borrowed(fname);
     }
@@ -87,7 +87,7 @@ impl TryFrom<anki_proto::notes::AddNoteRequest> for AddNoteRequest {
 }
 
 impl Collection {
-    pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<()>> {
+    pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<usize>> {
         self.transact(Op::AddNote, |col| col.add_note_inner(note, did))
     }
 
@@ -372,7 +372,7 @@ impl Collection {
         Ok(())
     }
 
-    pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<()> {
+    pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<usize> {
         let nt = self
             .get_notetype(note.notetype_id)?
             .or_invalid("missing note type")?;
@@ -383,10 +383,11 @@ impl Collection {
         note.prepare_for_update(ctx.notetype, normalize_text)?;
         note.set_modified(ctx.usn);
         self.add_note_only_undoable(note)?;
-        self.generate_cards_for_new_note(&ctx, note, did)?;
+        let count = self.generate_cards_for_new_note(&ctx, note, did)?;
         self.set_last_deck_for_notetype(note.notetype_id, did)?;
         self.set_last_notetype_for_deck(did, note.notetype_id)?;
-        self.set_current_notetype_id(note.notetype_id)
+        self.set_current_notetype_id(note.notetype_id)?;
+        Ok(count)
     }
 
     pub fn update_note(&mut self, note: &mut Note) -> Result<OpOutput<()>> {
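With the change above, `add_note` reports how many cards were generated instead of `()`. A minimal stand-in (not Anki's real types) for the pattern, showing how a caller would read the count out of the operation wrapper without a follow-up query:

```rust
struct OpOutput<T> {
    changes_pending: bool,
    output: T,
}

fn add_note_stub() -> OpOutput<usize> {
    // pretend the new note generated two cards
    OpOutput { changes_pending: true, output: 2 }
}

fn main() {
    let out = add_note_stub();
    println!("added {} card(s); changes pending: {}", out.output, out.changes_pending);
}
```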
@@ -215,7 +215,7 @@ impl Collection {
         ctx: &CardGenContext<impl Deref<Target = Notetype>>,
         note: &Note,
         target_deck_id: DeckId,
-    ) -> Result<()> {
+    ) -> Result<usize> {
         self.generate_cards_for_note(
             ctx,
             note,
@@ -231,7 +231,8 @@ impl Collection {
         note: &Note,
     ) -> Result<()> {
         let existing = self.storage.existing_cards_for_note(note.id)?;
-        self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())
+        self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())?;
+        Ok(())
     }
 
     fn generate_cards_for_note(
@@ -241,12 +242,13 @@ impl Collection {
         existing: &[AlreadyGeneratedCardInfo],
         target_deck_id: Option<DeckId>,
         cache: &mut CardGenCache,
-    ) -> Result<()> {
+    ) -> Result<usize> {
         let cards = ctx.new_cards_required(note, existing, true);
         if cards.is_empty() {
-            return Ok(());
+            return Ok(0);
         }
-        self.add_generated_cards(note.id, &cards, target_deck_id, cache)
+        self.add_generated_cards(note.id, &cards, target_deck_id, cache)?;
+        Ok(cards.len())
     }
 
     pub(crate) fn generate_cards_for_notetype(
@@ -25,7 +25,7 @@ pub struct RenderCardOutput {
 
 impl RenderCardOutput {
     /// The question text. This is only valid to call when partial_render=false.
-    pub fn question(&self) -> Cow<str> {
+    pub fn question(&self) -> Cow<'_, str> {
         match self.qnodes.as_slice() {
             [RenderedNode::Text { text }] => text.into(),
             _ => "not fully rendered".into(),
@@ -33,7 +33,7 @@ impl RenderCardOutput {
     }
 
     /// The answer text. This is only valid to call when partial_render=false.
-    pub fn answer(&self) -> Cow<str> {
+    pub fn answer(&self) -> Cow<'_, str> {
         match self.anodes.as_slice() {
             [RenderedNode::Text { text }] => text.into(),
             _ => "not fully rendered".into(),
@@ -84,6 +84,42 @@ impl RevlogEntry {
             })
             .unwrap()
     }
+
+    /// Returns true if this entry represents a reset operation.
+    /// These entries are created when a card is reset using
+    /// [`Collection::reschedule_cards_as_new`].
+    /// The 0 value of `ease_factor` differentiates it
+    /// from entry created by [`Collection::set_due_date`] that has
+    /// `RevlogReviewKind::Manual` but non-zero `ease_factor`.
+    pub(crate) fn is_reset(&self) -> bool {
+        self.review_kind == RevlogReviewKind::Manual && self.ease_factor == 0
+    }
+
+    /// Returns true if this entry represents a cramming operation.
+    /// These entries are created when a card is reviewed in a
+    /// filtered deck with "Reschedule cards based on my answers
+    /// in this deck" disabled.
+    /// [`crate::scheduler::answering::CardStateUpdater::apply_preview_state`].
+    /// The 0 value of `ease_factor` distinguishes it from the entry
+    /// created when a card is reviewed before its due date in a
+    /// filtered deck with reschedule enabled or using Grade Now.
+    pub(crate) fn is_cramming(&self) -> bool {
+        self.review_kind == RevlogReviewKind::Filtered && self.ease_factor == 0
+    }
+
+    pub(crate) fn has_rating(&self) -> bool {
+        self.button_chosen > 0
+    }
+
+    /// Returns true if the review entry is not manually rescheduled and not
+    /// cramming. Used to filter out entries that shouldn't be considered
+    /// for statistics and scheduling.
+    pub(crate) fn has_rating_and_affects_scheduling(&self) -> bool {
+        // not rescheduled/set due date/reset
+        self.has_rating()
+        // not cramming
+        && !self.is_cramming()
+    }
 }
 
 impl Collection {
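A self-contained sketch of the classification rules the new revlog helpers encode, using trimmed stand-ins for `RevlogEntry` and `RevlogReviewKind` (only the two kinds needed here are modelled):

```rust
#[derive(PartialEq)]
enum ReviewKind {
    Manual,
    Filtered,
    // Learning, Review and Rescheduled omitted for brevity
}

struct Entry {
    review_kind: ReviewKind,
    ease_factor: u32,
    button_chosen: u8,
}

impl Entry {
    // Manual + ease 0 => the card was reset; Manual + non-zero ease => set due date.
    fn is_reset(&self) -> bool {
        self.review_kind == ReviewKind::Manual && self.ease_factor == 0
    }
    // Filtered + ease 0 => cramming in a non-rescheduling filtered deck.
    fn is_cramming(&self) -> bool {
        self.review_kind == ReviewKind::Filtered && self.ease_factor == 0
    }
    fn affects_scheduling(&self) -> bool {
        self.button_chosen > 0 && !self.is_cramming()
    }
}

fn main() {
    let reset = Entry { review_kind: ReviewKind::Manual, ease_factor: 0, button_chosen: 0 };
    let set_due = Entry { review_kind: ReviewKind::Manual, ease_factor: 2500, button_chosen: 0 };
    let cram = Entry { review_kind: ReviewKind::Filtered, ease_factor: 0, button_chosen: 3 };
    assert!(reset.is_reset() && !set_due.is_reset());
    assert!(cram.is_cramming() && !cram.affects_scheduling());
}
```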
@@ -444,6 +444,8 @@ impl Collection {
             .get_deck(card.deck_id)?
             .or_not_found(card.deck_id)?;
         let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?;
+
+        let desired_retention = deck.effective_desired_retention(&config);
         let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs);
         let fsrs_next_states = if fsrs_enabled {
             let params = config.fsrs_params();
@@ -473,13 +475,13 @@ impl Collection {
             };
             Some(fsrs.next_states(
                 card.memory_state.map(Into::into),
-                config.inner.desired_retention,
+                desired_retention,
                 days_elapsed,
             )?)
         } else {
             None
         };
-        let desired_retention = fsrs_enabled.then_some(config.inner.desired_retention);
+        let desired_retention = fsrs_enabled.then_some(desired_retention);
         let fsrs_short_term_with_steps =
             self.get_config_bool(BoolKey::FsrsShortTermWithStepsEnabled);
         let fsrs_allow_short_term = if fsrs_enabled {
@@ -662,6 +664,43 @@ pub(crate) mod test {
         col.get_scheduling_states(card_id).unwrap().current
     }
 
+    // Test that deck-specific desired retention is used when available
+    #[test]
+    fn deck_specific_desired_retention() -> Result<()> {
+        let mut col = Collection::new();
+
+        // Enable FSRS
+        col.set_config_bool(BoolKey::Fsrs, true, false)?;
+
+        // Create a deck with specific desired retention
+        let deck_id = DeckId(1);
+        let deck = col.get_deck(deck_id)?.unwrap();
+        let mut deck_clone = (*deck).clone();
+        deck_clone.normal_mut().unwrap().desired_retention = Some(0.85);
+        col.update_deck(&mut deck_clone)?;
+
+        // Create a card in this deck
+        let nt = col.get_notetype_by_name("Basic")?.unwrap();
+        let mut note = nt.new_note();
+        col.add_note(&mut note, deck_id)?;
+
+        // Get the card using search_cards
+        let cards = col.search_cards(note.id, SortMode::NoOrder)?;
+        let card = col.storage.get_card(cards[0])?.unwrap();
+
+        // Test that the card state updater uses deck-specific desired retention
+        let updater = col.card_state_updater(card)?;
+
+        // Print debug information
+        println!("FSRS enabled: {}", col.get_config_bool(BoolKey::Fsrs));
+        println!("Desired retention: {:?}", updater.desired_retention);
+
+        // Verify that the desired retention is from the deck, not the config
+        assert_eq!(updater.desired_retention, Some(0.85));
+
+        Ok(())
+    }
+
     // make sure the 'current' state for a card matches the
     // state we applied to it
     #[test]
@@ -45,10 +45,11 @@ pub(crate) fn get_decay_from_params(params: &[f32]) -> f32 {
 #[derive(Debug)]
 pub(crate) struct UpdateMemoryStateRequest {
     pub params: Params,
-    pub desired_retention: f32,
+    pub preset_desired_retention: f32,
     pub historical_retention: f32,
     pub max_interval: u32,
     pub reschedule: bool,
+    pub deck_desired_retention: HashMap<DeckId, f32>,
 }
 
 pub(crate) struct UpdateMemoryStateEntry {
@@ -98,7 +99,6 @@ impl Collection {
             historical_retention.unwrap_or(0.9),
             ignore_before,
         )?;
-        let desired_retention = req.as_ref().map(|w| w.desired_retention);
         let mut progress = self.new_progress_handler::<ComputeMemoryProgress>();
         progress.update(false, |s| s.total_cards = items.len() as u32)?;
         for (idx, (card_id, item)) in items.into_iter().enumerate() {
@@ -106,10 +106,16 @@ impl Collection {
             let mut card = self.storage.get_card(card_id)?.or_not_found(card_id)?;
             let original = card.clone();
             if let Some(req) = &req {
+                let preset_desired_retention = req.preset_desired_retention;
                 // Store decay and desired retention in the card so that add-ons, card info,
                 // stats and browser search/sorts don't need to access the deck config.
                 // Unlike memory states, scheduler doesn't use decay and dr stored in the card.
-                card.desired_retention = desired_retention;
+                let deck_id = card.original_or_current_deck_id();
+                let desired_retention = *req
+                    .deck_desired_retention
+                    .get(&deck_id)
+                    .unwrap_or(&preset_desired_retention);
+                card.desired_retention = Some(desired_retention);
                 card.decay = decay;
                 if let Some(item) = item {
                     card.set_memory_state(&fsrs, Some(item), historical_retention.unwrap())?;
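The per-card resolution above is a lookup-with-fallback: a per-deck override map is consulted first, and the preset value is used for decks without an entry. A minimal sketch with plain types standing in for `DeckId` and the request struct:

```rust
use std::collections::HashMap;

fn dr_for_deck(overrides: &HashMap<i64, f32>, deck_id: i64, preset: f32) -> f32 {
    // dereference the borrowed override, or fall back to the preset
    *overrides.get(&deck_id).unwrap_or(&preset)
}

fn main() {
    let mut overrides = HashMap::new();
    overrides.insert(1, 0.85);
    assert_eq!(dr_for_deck(&overrides, 1, 0.9), 0.85);
    assert_eq!(dr_for_deck(&overrides, 2, 0.9), 0.9);
}
```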
@@ -132,7 +138,7 @@ impl Collection {
                     let original_interval = card.interval;
                     let interval = fsrs.next_interval(
                         Some(state.stability),
-                        desired_retention.unwrap(),
+                        desired_retention,
                         0,
                     );
                     card.interval = rescheduler
@@ -205,7 +211,11 @@ impl Collection {
             .storage
             .get_deck_config(conf_id)?
             .or_not_found(conf_id)?;
-        let desired_retention = config.inner.desired_retention;
+
+        // Get deck-specific desired retention if available, otherwise use config
+        // default
+        let desired_retention = deck.effective_desired_retention(&config);
+
         let historical_retention = config.inner.historical_retention;
         let params = config.fsrs_params();
         let decay = get_decay_from_params(params);
@@ -295,15 +305,15 @@ pub(crate) fn fsrs_items_for_memory_states(
         .collect()
 }
 
-struct LastRevlogInfo {
+pub(crate) struct LastRevlogInfo {
     /// Used to determine the actual elapsed time between the last time the user
     /// reviewed the card and now, so that we can determine an accurate period
     /// when the card has subsequently been rescheduled to a different day.
-    last_reviewed_at: Option<TimestampSecs>,
+    pub(crate) last_reviewed_at: Option<TimestampSecs>,
 }
 
-/// Return a map of cards to info about last review/reschedule.
-fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
+/// Return a map of cards to info about last review.
+pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
     let mut out = HashMap::new();
     revlogs
         .iter()
@@ -312,8 +322,10 @@ fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, LastRevlogInfo> {
         .for_each(|(card_id, group)| {
             let mut last_reviewed_at = None;
             for e in group.into_iter() {
-                if e.button_chosen >= 1 {
+                if e.has_rating_and_affects_scheduling() {
                     last_reviewed_at = Some(e.id.as_secs());
+                } else if e.is_reset() {
+                    last_reviewed_at = None;
                 }
             }
             out.insert(card_id, LastRevlogInfo { last_reviewed_at });
@@ -174,7 +174,7 @@ impl Collection {
         }
     }
 
-    let health_check_passed = if health_check {
+    let health_check_passed = if health_check && input.train_set.len() > 300 {
         let fsrs = FSRS::new(None)?;
         fsrs.evaluate_with_time_series_splits(input, |_| true)
             .ok()
@@ -394,13 +394,13 @@ pub(crate) fn reviews_for_fsrs(
     let mut revlogs_complete = false;
     // Working backwards from the latest review...
     for (index, entry) in entries.iter().enumerate().rev() {
-        if entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0 {
+        if entry.is_cramming() {
             continue;
         }
         // For incomplete review histories, initial memory state is based on the first
        // user-graded review after the cutoff date with interval >= 1d.
         let within_cutoff = entry.id.0 > ignore_revlogs_before.0;
-        let user_graded = matches!(entry.button_chosen, 1..=4);
+        let user_graded = entry.has_rating();
         let interday = entry.interval >= 1 || entry.interval <= -86400;
         if user_graded && within_cutoff && interday {
             first_user_grade_idx = Some(index);
@@ -409,10 +409,7 @@ pub(crate) fn reviews_for_fsrs(
         if user_graded && entry.review_kind == RevlogReviewKind::Learning {
             first_of_last_learn_entries = Some(index);
             revlogs_complete = true;
-        } else if matches!(
-            (entry.review_kind, entry.ease_factor),
-            (RevlogReviewKind::Manual, 0)
-        ) {
+        } else if entry.is_reset() {
             // Ignore entries prior to a `Reset` if a learning step has come after,
             // but consider revlogs complete.
             if first_of_last_learn_entries.is_some() {
@@ -472,16 +469,7 @@
     }
 
     // Filter out unwanted entries
-    entries.retain(|entry| {
-        !(
-            // set due date, reset or rescheduled
-            (entry.review_kind == RevlogReviewKind::Manual || entry.button_chosen == 0)
-            || // cram
-            (entry.review_kind == RevlogReviewKind::Filtered && entry.ease_factor == 0)
-            || // rescheduled
-            (entry.review_kind == RevlogReviewKind::Rescheduled)
-        )
-    });
+    entries.retain(|entry| entry.has_rating_and_affects_scheduling());
 
     // Compute delta_t for each entry
     let delta_ts = iter::once(0)
@@ -560,10 +548,14 @@ pub(crate) mod tests {
     }
 
     pub(crate) fn revlog(review_kind: RevlogReviewKind, days_ago: i64) -> RevlogEntry {
+        let button_chosen = match review_kind {
+            RevlogReviewKind::Manual | RevlogReviewKind::Rescheduled => 0,
+            _ => 3,
+        };
         RevlogEntry {
             review_kind,
             id: days_ago_ms(days_ago).into(),
-            button_chosen: 3,
+            button_chosen,
             interval: 1,
             ..Default::default()
         }
@@ -1,11 +1,13 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+use std::collections::HashMap;
 use std::sync::Arc;
 
 use anki_proto::deck_config::deck_config::config::ReviewCardOrder;
 use anki_proto::deck_config::deck_config::config::ReviewCardOrder::*;
 use anki_proto::scheduler::SimulateFsrsReviewRequest;
 use anki_proto::scheduler::SimulateFsrsReviewResponse;
+use anki_proto::scheduler::SimulateFsrsWorkloadResponse;
 use fsrs::simulate;
 use fsrs::PostSchedulingFn;
 use fsrs::ReviewPriorityFn;
@@ -14,6 +16,8 @@ use fsrs::FSRS;
 use itertools::Itertools;
 use rand::rngs::StdRng;
 use rand::Rng;
+use rayon::iter::IntoParallelIterator;
+use rayon::iter::ParallelIterator;
 
 use crate::card::CardQueue;
 use crate::card::CardType;
@@ -93,7 +97,7 @@ fn create_review_priority_fn(
 
         // Interval-based ordering
         IntervalsAscending => wrap!(|c, _w| c.interval as i32),
-        IntervalsDescending => wrap!(|c, _w| -(c.interval as i32)),
+        IntervalsDescending => wrap!(|c, _w| (c.interval as i32).saturating_neg()),
         // Retrievability-based ordering
         RetrievabilityAscending => {
            wrap!(move |c, w| (c.retrievability(w) * 1000.0) as i32)
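Why the switch to `saturating_neg` above: plain negation of `i32::MIN` overflows, since `+2147483648` does not exist in `i32`, and that panics in debug builds. The saturating form clamps instead, keeping the sort key total and panic-free. A quick illustration:

```rust
fn main() {
    assert_eq!(i32::MIN.saturating_neg(), i32::MAX);
    assert_eq!(500_i32.saturating_neg(), -500);
    // `-(i32::MIN)` would panic in debug builds and wrap in release builds
}
```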
@@ -117,6 +121,12 @@ fn create_review_priority_fn(
     }
 }
 
+pub(crate) fn is_included_card(c: &Card) -> bool {
+    c.queue != CardQueue::Suspended
+        && c.queue != CardQueue::PreviewRepeat
+        && c.ctype != CardType::New
+}
+
 impl Collection {
     pub fn simulate_request_to_config(
         &mut self,
@@ -129,18 +139,14 @@ impl Collection {
             .get_revlog_entries_for_searched_cards_in_card_order()?;
         let mut cards = guard.col.storage.all_searched_cards()?;
         drop(guard);
-        fn is_included_card(c: &Card) -> bool {
-            c.queue != CardQueue::Suspended
-                && c.queue != CardQueue::PreviewRepeat
-                && c.ctype != CardType::New
-        }
         // calculate any missing memory state
         for c in &mut cards {
             if is_included_card(c) && c.memory_state.is_none() {
-                let original = c.clone();
-                let new_state = self.compute_memory_state(c.id)?.state;
-                c.memory_state = new_state.map(Into::into);
-                self.update_card_inner(c, original, self.usn()?)?;
+                let fsrs_data = self.compute_memory_state(c.id)?;
+                c.memory_state = fsrs_data.state.map(Into::into);
+                c.desired_retention = Some(fsrs_data.desired_retention);
+                c.decay = Some(fsrs_data.decay);
+                self.storage.update_card(c)?;
             }
         }
         let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
@@ -233,8 +239,8 @@ impl Collection {
             learning_step_transitions: p.learning_step_transitions,
             relearning_step_transitions: p.relearning_step_transitions,
             state_rating_costs: p.state_rating_costs,
-            learning_step_count: p.learning_step_count,
-            relearning_step_count: p.relearning_step_count,
+            learning_step_count: req.learning_step_count as usize,
+            relearning_step_count: req.relearning_step_count as usize,
         };
 
         Ok((config, converted_cards))
@@ -267,10 +273,47 @@ impl Collection {
             daily_time_cost: result.cost_per_day,
         })
     }
+
+    pub fn simulate_workload(
+        &mut self,
+        req: SimulateFsrsReviewRequest,
+    ) -> Result<SimulateFsrsWorkloadResponse> {
+        let (config, cards) = self.simulate_request_to_config(&req)?;
+        let dr_workload = (70u32..=99u32)
+            .into_par_iter()
+            .map(|dr| {
+                let result = simulate(
+                    &config,
+                    &req.params,
+                    dr as f32 / 100.,
+                    None,
+                    Some(cards.clone()),
+                )?;
+                Ok((
+                    dr,
+                    (
+                        *result.memorized_cnt_per_day.last().unwrap_or(&0.),
+                        result.cost_per_day.iter().sum::<f32>(),
+                        result.review_cnt_per_day.iter().sum::<usize>() as u32
+                            + result.learn_cnt_per_day.iter().sum::<usize>() as u32,
+                    ),
+                ))
+            })
+            .collect::<Result<HashMap<_, _>>>()?;
+        Ok(SimulateFsrsWorkloadResponse {
+            memorized: dr_workload.iter().map(|(k, v)| (*k, v.0)).collect(),
+            cost: dr_workload.iter().map(|(k, v)| (*k, v.1)).collect(),
+            review_count: dr_workload.iter().map(|(k, v)| (*k, v.2)).collect(),
+        })
+    }
 }
 
 impl Card {
-    fn convert(card: Card, days_elapsed: i32, memory_state: FsrsMemoryState) -> Option<fsrs::Card> {
+    pub(crate) fn convert(
+        card: Card,
+        days_elapsed: i32,
+        memory_state: FsrsMemoryState,
+    ) -> Option<fsrs::Card> {
         match card.queue {
             CardQueue::DayLearn | CardQueue::Review => {
                 let due = card.original_or_current_due();
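For each desired retention in 70..=99, `simulate_workload` above yields total memorized cards, total study cost, and total review count, letting a client plot workload against retention. A sketch of one way those maps could be combined; the map shapes follow the response message in the diff, the scoring rule itself is purely illustrative:

```rust
use std::collections::HashMap;

// Pick the desired retention maximizing knowledge gained per unit of study time.
fn best_tradeoff(memorized: &HashMap<u32, f32>, cost: &HashMap<u32, f32>) -> Option<u32> {
    memorized
        .iter()
        .filter_map(|(dr, m)| {
            let c = cost.get(dr)?;
            // guard against division by zero for empty simulations
            (*c > 0.0).then(|| (*dr, m / c))
        })
        .max_by(|a, b| a.1.total_cmp(&b.1))
        .map(|(dr, _)| dr)
}

fn main() {
    let memorized = HashMap::from([(80, 900.0_f32), (90, 1000.0)]);
    let cost = HashMap::from([(80, 300.0_f32), (90, 500.0)]);
    assert_eq!(best_tradeoff(&memorized, &cost), Some(80));
}
```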
@@ -61,28 +61,26 @@ impl QueueBuilder {
     }
 
     fn gather_new_cards(&mut self, col: &mut Collection) -> Result<()> {
+        let salt = Self::knuth_salt(self.context.timing.days_elapsed);
         match self.context.sort_options.new_gather_priority {
             NewCardGatherPriority::Deck => {
                 self.gather_new_cards_by_deck(col, NewCardSorting::LowestPosition)
             }
-            NewCardGatherPriority::DeckThenRandomNotes => self.gather_new_cards_by_deck(
-                col,
-                NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
-            ),
+            NewCardGatherPriority::DeckThenRandomNotes => {
+                self.gather_new_cards_by_deck(col, NewCardSorting::RandomNotes(salt))
+            }
             NewCardGatherPriority::LowestPosition => {
                 self.gather_new_cards_sorted(col, NewCardSorting::LowestPosition)
             }
             NewCardGatherPriority::HighestPosition => {
                 self.gather_new_cards_sorted(col, NewCardSorting::HighestPosition)
             }
-            NewCardGatherPriority::RandomNotes => self.gather_new_cards_sorted(
-                col,
-                NewCardSorting::RandomNotes(self.context.timing.days_elapsed),
-            ),
-            NewCardGatherPriority::RandomCards => self.gather_new_cards_sorted(
-                col,
-                NewCardSorting::RandomCards(self.context.timing.days_elapsed),
-            ),
+            NewCardGatherPriority::RandomNotes => {
+                self.gather_new_cards_sorted(col, NewCardSorting::RandomNotes(salt))
+            }
+            NewCardGatherPriority::RandomCards => {
+                self.gather_new_cards_sorted(col, NewCardSorting::RandomCards(salt))
+            }
         }
     }
 
@@ -169,4 +167,10 @@ impl QueueBuilder {
             true
         }
     }
+
+    // Generates a salt for use with fnvhash. Useful to increase randomness
+    // when the base salt is a small integer.
+    fn knuth_salt(base_salt: u32) -> u32 {
+        base_salt.wrapping_mul(2654435761)
+    }
 }
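The constant 2654435761 in `knuth_salt` is the prime close to 2^32/phi used in Knuth's multiplicative hashing: multiplying small consecutive integers by it (mod 2^32) spreads them across the full 32-bit range, so day numbers 0, 1, 2... no longer produce near-identical FNV salts. A quick look at the effect:

```rust
fn knuth_salt(base: u32) -> u32 {
    base.wrapping_mul(2654435761)
}

fn main() {
    for day in 0..3u32 {
        println!("{day} -> {:#010x}", knuth_salt(day));
    }
    // 0 -> 0x00000000, 1 -> 0x9e3779b1, 2 -> 0x3c6ef362
}
```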
@@ -16,6 +16,7 @@ use anki_proto::scheduler::FuzzDeltaResponse;
 use anki_proto::scheduler::GetOptimalRetentionParametersResponse;
 use anki_proto::scheduler::SimulateFsrsReviewRequest;
 use anki_proto::scheduler::SimulateFsrsReviewResponse;
+use anki_proto::scheduler::SimulateFsrsWorkloadResponse;
 use fsrs::ComputeParametersInput;
 use fsrs::FSRSItem;
 use fsrs::FSRSReview;
@@ -283,6 +284,13 @@ impl crate::services::SchedulerService for Collection {
         self.simulate_review(input)
     }
 
+    fn simulate_fsrs_workload(
+        &mut self,
+        input: SimulateFsrsReviewRequest,
+    ) -> Result<SimulateFsrsWorkloadResponse> {
+        self.simulate_workload(input)
+    }
+
     fn compute_optimal_retention(
         &mut self,
         input: SimulateFsrsReviewRequest,
@@ -174,7 +174,7 @@ impl LoadBalancer {
         &self,
         note_id: Option<NoteId>,
         deckconfig_id: DeckConfigId,
-    ) -> LoadBalancerContext {
+    ) -> LoadBalancerContext<'_> {
         LoadBalancerContext {
             load_balancer: self,
             note_id,
@@ -57,10 +57,10 @@ const SECOND: f32 = 1.0;
 const MINUTE: f32 = 60.0 * SECOND;
 const HOUR: f32 = 60.0 * MINUTE;
 const DAY: f32 = 24.0 * HOUR;
-const MONTH: f32 = 30.417 * DAY; // 365/12 ≈ 30.417
 const YEAR: f32 = 365.0 * DAY;
+const MONTH: f32 = YEAR / 12.0;
 
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 pub(crate) enum TimespanUnit {
     Seconds,
     Minutes,
@@ -111,6 +111,13 @@ impl Timespan {
         }
     }
 
+    pub fn to_unit(self, unit: TimespanUnit) -> Timespan {
+        Timespan {
+            seconds: self.seconds,
+            unit,
+        }
+    }
+
     /// Round seconds and days to integers, otherwise
     /// truncates to one decimal place.
     pub fn as_rounded_unit(self) -> f32 {
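A sketch of what the new `to_unit` affords: re-expressing a span in a caller-chosen unit instead of the automatically picked "natural" one. The types below are simplified stand-ins for the real `Timespan`/`TimespanUnit`:

```rust
#[derive(Clone, Copy)]
enum Unit {
    Seconds,
    Hours,
}

#[derive(Clone, Copy)]
struct Span {
    seconds: f32,
    unit: Unit,
}

impl Span {
    fn to_unit(self, unit: Unit) -> Span {
        Span { seconds: self.seconds, unit } // only the display unit changes
    }
    fn as_unit(self) -> f32 {
        match self.unit {
            Unit::Seconds => self.seconds,
            Unit::Hours => self.seconds / 3600.0,
        }
    }
}

fn main() {
    let span = Span { seconds: 5400.0, unit: Unit::Seconds };
    assert_eq!(span.to_unit(Unit::Hours).as_unit(), 1.5);
}
```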
@ -6,6 +6,7 @@ use std::mem;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use super::writer::write_nodes;
|
use super::writer::write_nodes;
|
||||||
|
use super::FieldSearchMode;
|
||||||
use super::Node;
|
use super::Node;
|
||||||
use super::SearchNode;
|
use super::SearchNode;
|
||||||
use super::StateKind;
|
use super::StateKind;
|
||||||
|
@ -174,7 +175,7 @@ impl SearchNode {
|
||||||
pub fn from_tag_name(name: &str) -> Self {
|
pub fn from_tag_name(name: &str) -> Self {
|
||||||
Self::Tag {
|
Self::Tag {
|
||||||
tag: escape_anki_wildcards_for_search_node(name),
|
tag: escape_anki_wildcards_for_search_node(name),
|
||||||
is_re: false,
|
mode: FieldSearchMode::Normal,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -13,6 +13,7 @@ pub use builder::JoinSearches;
|
||||||
pub use builder::Negated;
|
pub use builder::Negated;
|
||||||
pub use builder::SearchBuilder;
|
pub use builder::SearchBuilder;
|
||||||
pub use parser::parse as parse_search;
|
pub use parser::parse as parse_search;
|
||||||
|
pub use parser::FieldSearchMode;
|
||||||
pub use parser::Node;
|
pub use parser::Node;
|
||||||
pub use parser::PropertyKind;
|
pub use parser::PropertyKind;
|
||||||
pub use parser::RatingKind;
|
pub use parser::RatingKind;
|
||||||
|
@ -226,7 +227,7 @@ impl Collection {
|
||||||
&mut self,
|
&mut self,
|
||||||
search: impl TryIntoSearch,
|
search: impl TryIntoSearch,
|
||||||
mode: SortMode,
|
mode: SortMode,
|
||||||
) -> Result<CardTableGuard> {
|
) -> Result<CardTableGuard<'_>> {
|
||||||
let top_node = search.try_into_search()?;
|
let top_node = search.try_into_search()?;
|
||||||
let writer = SqlWriter::new(self, ReturnItemType::Cards);
|
let writer = SqlWriter::new(self, ReturnItemType::Cards);
|
||||||
let want_order = mode != SortMode::NoOrder;
|
let want_order = mode != SortMode::NoOrder;
|
||||||
|
@ -299,7 +300,7 @@ impl Collection {
|
||||||
pub(crate) fn search_notes_into_table(
|
pub(crate) fn search_notes_into_table(
|
||||||
&mut self,
|
&mut self,
|
||||||
search: impl TryIntoSearch,
|
search: impl TryIntoSearch,
|
||||||
) -> Result<NoteTableGuard> {
|
) -> Result<NoteTableGuard<'_>> {
|
||||||
let top_node = search.try_into_search()?;
|
let top_node = search.try_into_search()?;
|
||||||
let writer = SqlWriter::new(self, ReturnItemType::Notes);
|
let writer = SqlWriter::new(self, ReturnItemType::Notes);
|
||||||
let mode = SortMode::NoOrder;
|
let mode = SortMode::NoOrder;
|
||||||
|
@ -320,7 +321,7 @@ impl Collection {
|
||||||
|
|
||||||
/// Place the ids of cards with notes in 'search_nids' into 'search_cids'.
|
/// Place the ids of cards with notes in 'search_nids' into 'search_cids'.
|
||||||
/// Returns number of added cards.
|
/// Returns number of added cards.
|
||||||
pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard> {
|
pub(crate) fn search_cards_of_notes_into_table(&mut self) -> Result<CardTableGuard<'_>> {
|
||||||
self.storage.setup_searched_cards_table()?;
|
self.storage.setup_searched_cards_table()?;
|
||||||
let cards = self.storage.search_cards_of_notes_into_table()?;
|
let cards = self.storage.search_cards_of_notes_into_table()?;
|
||||||
Ok(CardTableGuard { cards, col: self })
|
Ok(CardTableGuard { cards, col: self })
|
||||||
|
@@ -378,9 +379,10 @@ fn card_order_from_sort_column(column: Column, timing: SchedTimingToday) -> Cow<
         Column::Stability => "extract_fsrs_variable(c.data, 's') asc".into(),
         Column::Difficulty => "extract_fsrs_variable(c.data, 'd') asc".into(),
         Column::Retrievability => format!(
-            "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {}, {}) asc",
+            "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {}, {}, {}) asc",
             timing.days_elapsed,
-            timing.next_day_at.0
+            timing.next_day_at.0,
+            timing.now.0,
         )
         .into(),
     }
@@ -3,6 +3,7 @@
 
 use std::sync::LazyLock;
 
+use anki_proto::search::search_node::FieldSearchMode as FieldSearchModeProto;
 use nom::branch::alt;
 use nom::bytes::complete::escaped;
 use nom::bytes::complete::is_not;
@@ -27,7 +28,6 @@ use crate::error::ParseError;
-
 use crate::error::Result;
 use crate::error::SearchErrorKind as FailKind;
 use crate::prelude::*;
 
 type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err<ParseError<'a>>>;
 type ParseResult<'a, O> = std::result::Result<O, nom::Err<ParseError<'a>>>;
@@ -48,6 +48,23 @@ pub enum Node {
     Search(SearchNode),
 }
 
+#[derive(Copy, Debug, PartialEq, Eq, Clone)]
+pub enum FieldSearchMode {
+    Normal,
+    Regex,
+    NoCombining,
+}
+
+impl From<FieldSearchModeProto> for FieldSearchMode {
+    fn from(mode: FieldSearchModeProto) -> Self {
+        match mode {
+            FieldSearchModeProto::Normal => Self::Normal,
+            FieldSearchModeProto::Regex => Self::Regex,
+            FieldSearchModeProto::Nocombining => Self::NoCombining,
+        }
+    }
+}
+
 #[derive(Debug, PartialEq, Clone)]
 pub enum SearchNode {
     // text without a colon
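With `FieldSearchMode` in place, a field search carries one of three modes instead of a boolean: in search syntax, `field:text` is Normal, `field:re:pattern` is Regex, and the new `field:nc:text` ignores combining characters. A usage sketch based on the parser exports and the tests later in this diff:

// Sketch: the expected node shape is taken from this diff's own tests.
use crate::search::parse_search;
use crate::search::FieldSearchMode;
use crate::search::Node;
use crate::search::SearchNode;

fn nc_example() -> crate::error::Result<()> {
    assert_eq!(
        parse_search("front:nc:frânçais")?,
        vec![Node::Search(SearchNode::SingleField {
            field: "front".into(),
            text: "frânçais".into(),
            mode: FieldSearchMode::NoCombining,
        })]
    );
    Ok(())
}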
@@ -56,7 +73,7 @@ pub enum SearchNode {
     SingleField {
         field: String,
         text: String,
-        is_re: bool,
+        mode: FieldSearchMode,
     },
     AddedInDays(u32),
     EditedInDays(u32),
@@ -77,7 +94,7 @@ pub enum SearchNode {
     },
     Tag {
         tag: String,
-        is_re: bool,
+        mode: FieldSearchMode,
     },
     Duplicates {
         notetype_id: NotetypeId,
@@ -158,7 +175,7 @@ pub fn parse(input: &str) -> Result<Vec<Node>> {
 
 /// Zero or more nodes inside brackets, eg 'one OR two -three'.
 /// Empty vec must be handled by caller.
-fn group_inner(input: &str) -> IResult<Vec<Node>> {
+fn group_inner(input: &str) -> IResult<'_, Vec<Node>> {
     let mut remaining = input;
     let mut nodes = vec![];
 
@@ -203,16 +220,16 @@ fn group_inner(input: &str) -> IResult<Vec<Node>> {
     Ok((remaining, nodes))
 }
 
-fn whitespace0(s: &str) -> IResult<Vec<char>> {
+fn whitespace0(s: &str) -> IResult<'_, Vec<char>> {
     many0(one_of(" \u{3000}")).parse(s)
 }
 
 /// Optional leading space, then a (negated) group or text
-fn node(s: &str) -> IResult<Node> {
+fn node(s: &str) -> IResult<'_, Node> {
     preceded(whitespace0, alt((negated_node, group, text))).parse(s)
 }
 
-fn negated_node(s: &str) -> IResult<Node> {
+fn negated_node(s: &str) -> IResult<'_, Node> {
     map(preceded(char('-'), alt((group, text))), |node| {
         Node::Not(Box::new(node))
     })
@@ -220,7 +237,7 @@ fn negated_node(s: &str) -> IResult<Node> {
 }
 
 /// One or more nodes surrounded by brackets, eg (one OR two)
-fn group(s: &str) -> IResult<Node> {
+fn group(s: &str) -> IResult<'_, Node> {
     let (opened, _) = char('(')(s)?;
     let (tail, inner) = group_inner(opened)?;
     if let Some(remaining) = tail.strip_prefix(')') {
@@ -235,18 +252,18 @@ fn group(s: &str) -> IResult<Node> {
 }
 
 /// Either quoted or unquoted text
-fn text(s: &str) -> IResult<Node> {
+fn text(s: &str) -> IResult<'_, Node> {
     alt((quoted_term, partially_quoted_term, unquoted_term)).parse(s)
 }
 
 /// Quoted text, including the outer double quotes.
-fn quoted_term(s: &str) -> IResult<Node> {
+fn quoted_term(s: &str) -> IResult<'_, Node> {
     let (remaining, term) = quoted_term_str(s)?;
     Ok((remaining, Node::Search(search_node_for_text(term)?)))
 }
 
 /// eg deck:"foo bar" - quotes must come after the :
-fn partially_quoted_term(s: &str) -> IResult<Node> {
+fn partially_quoted_term(s: &str) -> IResult<'_, Node> {
     let (remaining, (key, val)) = separated_pair(
         escaped(is_not("\"(): \u{3000}\\"), '\\', none_of(" \u{3000}")),
         char(':'),
@@ -260,7 +277,7 @@ fn partially_quoted_term(s: &str) -> IResult<Node> {
 }
 
 /// Unquoted text, terminated by whitespace or unescaped ", ( or )
-fn unquoted_term(s: &str) -> IResult<Node> {
+fn unquoted_term(s: &str) -> IResult<'_, Node> {
     match escaped(is_not("\"() \u{3000}\\"), '\\', none_of(" \u{3000}"))(s) {
         Ok((tail, term)) => {
             if term.is_empty() {
@@ -297,7 +314,7 @@ fn unquoted_term(s: &str) -> IResult<Node> {
 }
 
 /// Non-empty string delimited by unescaped double quotes.
-fn quoted_term_str(s: &str) -> IResult<&str> {
+fn quoted_term_str(s: &str) -> IResult<'_, &str> {
     let (opened, _) = char('"')(s)?;
     if let Ok((tail, inner)) =
         escaped::<_, ParseError, _, _>(is_not(r#""\"#), '\\', anychar).parse(opened)
@@ -321,7 +338,7 @@ fn quoted_term_str(s: &str) -> IResult<&str> {
 
 /// Determine if text is a qualified search, and handle escaped chars.
 /// Expect well-formed input: unempty and no trailing \.
-fn search_node_for_text(s: &str) -> ParseResult<SearchNode> {
+fn search_node_for_text(s: &str) -> ParseResult<'_, SearchNode> {
     // leading : is only possible error for well-formed input
     let (tail, head) = verify(escaped(is_not(r":\"), '\\', anychar), |t: &str| {
         !t.is_empty()
@@ -369,21 +386,21 @@ fn search_node_for_text_with_argument<'a>(
     })
 }
 
-fn parse_tag(s: &str) -> ParseResult<SearchNode> {
+fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> {
     Ok(if let Some(re) = s.strip_prefix("re:") {
         SearchNode::Tag {
             tag: unescape_quotes(re),
-            is_re: true,
+            mode: FieldSearchMode::Regex,
         }
     } else {
         SearchNode::Tag {
             tag: unescape(s)?,
-            is_re: false,
+            mode: FieldSearchMode::Normal,
         }
     })
 }
 
-fn parse_template(s: &str) -> ParseResult<SearchNode> {
+fn parse_template(s: &str) -> ParseResult<'_, SearchNode> {
     Ok(SearchNode::CardTemplate(match s.parse::<u16>() {
         Ok(n) => TemplateKind::Ordinal(n.max(1) - 1),
         Err(_) => TemplateKind::Name(unescape(s)?),
@@ -391,7 +408,7 @@ fn parse_template(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// flag:0-7
-fn parse_flag(s: &str) -> ParseResult<SearchNode> {
+fn parse_flag(s: &str) -> ParseResult<'_, SearchNode> {
     if let Ok(flag) = s.parse::<u8>() {
         if flag > 7 {
             Err(parse_failure(s, FailKind::InvalidFlag))
@@ -404,7 +421,7 @@ fn parse_flag(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// eg resched:3
-fn parse_resched(s: &str) -> ParseResult<SearchNode> {
+fn parse_resched(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "resched:").map(|days| SearchNode::Rated {
         days,
         ease: RatingKind::ManualReschedule,
@@ -412,7 +429,7 @@ fn parse_resched(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// eg prop:ivl>3, prop:ease!=2.5
-fn parse_prop(prop_clause: &str) -> ParseResult<SearchNode> {
+fn parse_prop(prop_clause: &str) -> ParseResult<'_, SearchNode> {
     let (tail, prop) = alt((
         tag("ivl"),
         tag("due"),
@@ -580,23 +597,23 @@ fn parse_prop_rated<'a>(num: &str, context: &'a str) -> ParseResult<'a, Property
 }
 
 /// eg added:1
-fn parse_added(s: &str) -> ParseResult<SearchNode> {
+fn parse_added(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "added:").map(|n| SearchNode::AddedInDays(n.max(1)))
 }
 
 /// eg edited:1
-fn parse_edited(s: &str) -> ParseResult<SearchNode> {
+fn parse_edited(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "edited:").map(|n| SearchNode::EditedInDays(n.max(1)))
 }
 
 /// eg introduced:1
-fn parse_introduced(s: &str) -> ParseResult<SearchNode> {
+fn parse_introduced(s: &str) -> ParseResult<'_, SearchNode> {
     parse_u32(s, "introduced:").map(|n| SearchNode::IntroducedInDays(n.max(1)))
 }
 
 /// eg rated:3 or rated:10:2
 /// second arg must be between 1-4
-fn parse_rated(s: &str) -> ParseResult<SearchNode> {
+fn parse_rated(s: &str) -> ParseResult<'_, SearchNode> {
     let mut it = s.splitn(2, ':');
     let days = parse_u32(it.next().unwrap(), "rated:")?.max(1);
     let button = parse_answer_button(it.next(), s)?;
@@ -604,7 +621,7 @@ fn parse_rated(s: &str) -> ParseResult<SearchNode> {
 }
 
 /// eg is:due
-fn parse_state(s: &str) -> ParseResult<SearchNode> {
+fn parse_state(s: &str) -> ParseResult<'_, SearchNode> {
     use StateKind::*;
     Ok(SearchNode::State(match s {
         "new" => New,
@@ -624,7 +641,7 @@ fn parse_state(s: &str) -> ParseResult<SearchNode> {
     }))
 }
 
-fn parse_mid(s: &str) -> ParseResult<SearchNode> {
+fn parse_mid(s: &str) -> ParseResult<'_, SearchNode> {
     parse_i64(s, "mid:").map(|n| SearchNode::NotetypeId(n.into()))
 }
 
@@ -646,7 +663,7 @@ fn check_id_list<'a>(s: &'a str, context: &str) -> ParseResult<'a, &'a str> {
 }
 
 /// eg dupe:1231,hello
-fn parse_dupe(s: &str) -> ParseResult<SearchNode> {
+fn parse_dupe(s: &str) -> ParseResult<'_, SearchNode> {
     let mut it = s.splitn(2, ',');
     let ntid = parse_i64(it.next().unwrap(), s)?;
     if let Some(text) = it.next() {
@@ -670,13 +687,19 @@ fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchN
         SearchNode::SingleField {
             field: unescape(key)?,
             text: unescape_quotes(stripped),
-            is_re: true,
+            mode: FieldSearchMode::Regex,
+        }
+    } else if let Some(stripped) = val.strip_prefix("nc:") {
+        SearchNode::SingleField {
+            field: unescape(key)?,
+            text: unescape_quotes(stripped),
+            mode: FieldSearchMode::NoCombining,
         }
     } else {
         SearchNode::SingleField {
             field: unescape(key)?,
             text: unescape(val)?,
-            is_re: false,
+            mode: FieldSearchMode::Normal,
         }
     })
 }
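Note the prefix precedence in `parse_single_field`: `re:` is tried first, then `nc:`, and anything else is a Normal match, so a literal leading `re:` or `nc:` in the text needs its colon escaped (the writer change later in this diff adds exactly that escaping when round-tripping). A hedged illustration, per the unescape logic above:

// Sketch: escaping the colon keeps the would-be prefix literal.
fn escaped_prefix_example() -> crate::error::Result<()> {
    assert_eq!(
        parse_search(r"front:nc\:bar")?,
        vec![Node::Search(SearchNode::SingleField {
            field: "front".into(),
            text: "nc:bar".into(),
            mode: FieldSearchMode::Normal,
        })]
    );
    Ok(())
}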
@@ -700,7 +723,7 @@ fn unescape_quotes_and_backslashes(s: &str) -> String {
 }
 
 /// Unescape chars with special meaning to the parser.
-fn unescape(txt: &str) -> ParseResult<String> {
+fn unescape(txt: &str) -> ParseResult<'_, String> {
     if let Some(seq) = invalid_escape_sequence(txt) {
         Err(parse_failure(
             txt,
@@ -806,7 +829,7 @@ mod test {
                 Search(SingleField {
                     field: "foo".into(),
                     text: "bar baz".into(),
-                    is_re: false,
+                    mode: FieldSearchMode::Normal,
                 })
             ]))),
             Or,
@@ -819,7 +842,16 @@ mod test {
             vec![Search(SingleField {
                 field: "foo".into(),
                 text: "bar".into(),
-                is_re: true
+                mode: FieldSearchMode::Regex,
+            })]
+        );
+
+        assert_eq!(
+            parse("foo:nc:bar")?,
+            vec![Search(SingleField {
+                field: "foo".into(),
+                text: "bar".into(),
+                mode: FieldSearchMode::NoCombining,
             })]
         );
 
@@ -829,7 +861,7 @@ mod test {
             vec![Search(SingleField {
                 field: "field".into(),
                 text: "va\"lue".into(),
-                is_re: false
+                mode: FieldSearchMode::Normal,
             })]
         );
         assert_eq!(parse(r#""field:va\"lue""#)?, parse(r#"field:"va\"lue""#)?,);
|
||||||
parse("tag:hard")?,
|
parse("tag:hard")?,
|
||||||
vec![Search(Tag {
|
vec![Search(Tag {
|
||||||
tag: "hard".into(),
|
tag: "hard".into(),
|
||||||
is_re: false
|
mode: FieldSearchMode::Normal
|
||||||
})]
|
})]
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
parse(r"tag:re:\\")?,
|
parse(r"tag:re:\\")?,
|
||||||
vec![Search(Tag {
|
vec![Search(Tag {
|
||||||
tag: r"\\".into(),
|
tag: r"\\".into(),
|
||||||
is_re: true
|
mode: FieldSearchMode::Regex
|
||||||
})]
|
})]
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
|
|
@@ -6,6 +6,7 @@ use itertools::Itertools;
 
 use crate::prelude::*;
 use crate::search::parse_search;
+use crate::search::FieldSearchMode;
 use crate::search::Negated;
 use crate::search::Node;
 use crate::search::PropertyKind;
@@ -40,7 +41,7 @@ impl TryFrom<anki_proto::search::SearchNode> for Node {
             Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
                 field: escape_anki_wildcards_for_search_node(&s),
                 text: "_*".to_string(),
-                is_re: false,
+                mode: FieldSearchMode::Normal,
             }),
             Filter::Rated(rated) => Node::Search(SearchNode::Rated {
                 days: rated.days,
@@ -107,7 +108,7 @@ impl TryFrom<anki_proto::search::SearchNode> for Node {
             Filter::Field(field) => Node::Search(SearchNode::SingleField {
                 field: escape_anki_wildcards(&field.field_name),
                 text: escape_anki_wildcards(&field.text),
-                is_re: field.is_re,
+                mode: field.mode().into(),
             }),
             Filter::LiteralText(text) => {
                 let text = escape_anki_wildcards(&text);
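On the protobuf boundary, the generated `field.mode()` accessor returns a `FieldSearchModeProto`, and the `From` impl added in the parser maps it onto the internal enum. In isolation (a sketch, assuming the proto variants shown earlier in this diff):

fn proto_mode_example() {
    // The From impl above makes .into() available for the proto enum.
    let mode: FieldSearchMode = FieldSearchModeProto::Nocombining.into();
    assert_eq!(mode, FieldSearchMode::NoCombining);
}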
@@ -7,6 +7,7 @@ use std::ops::Range;
 
 use itertools::Itertools;
 
+use super::parser::FieldSearchMode;
 use super::parser::Node;
 use super::parser::PropertyKind;
 use super::parser::RatingKind;
@@ -138,8 +139,8 @@ impl SqlWriter<'_> {
                     false,
                 )?
             }
-            SearchNode::SingleField { field, text, is_re } => {
-                self.write_field(&norm(field), &self.norm_note(text), *is_re)?
+            SearchNode::SingleField { field, text, mode } => {
+                self.write_field(&norm(field), &self.norm_note(text), *mode)?
             }
             SearchNode::Duplicates { notetype_id, text } => {
                 self.write_dupe(*notetype_id, &self.norm_note(text))?
@@ -180,7 +181,7 @@ impl SqlWriter<'_> {
             SearchNode::Notetype(notetype) => self.write_notetype(&norm(notetype)),
             SearchNode::Rated { days, ease } => self.write_rated(">", -i64::from(*days), ease)?,
 
-            SearchNode::Tag { tag, is_re } => self.write_tag(&norm(tag), *is_re),
+            SearchNode::Tag { tag, mode } => self.write_tag(&norm(tag), *mode),
             SearchNode::State(state) => self.write_state(state)?,
             SearchNode::Flag(flag) => {
                 write!(self.sql, "(c.flags & 7) == {flag}").unwrap();
@@ -296,8 +297,8 @@ impl SqlWriter<'_> {
         Ok(())
     }
 
-    fn write_tag(&mut self, tag: &str, is_re: bool) {
-        if is_re {
+    fn write_tag(&mut self, tag: &str, mode: FieldSearchMode) {
+        if mode == FieldSearchMode::Regex {
             self.args.push(format!("(?i){tag}"));
             write!(self.sql, "regexp_tags(?{}, n.tags)", self.args.len()).unwrap();
         } else {
@@ -418,13 +419,13 @@ impl SqlWriter<'_> {
                 write!(self.sql, "extract_fsrs_variable(c.data, 'd') {op} {d}").unwrap()
             }
             PropertyKind::Retrievability(r) => {
-                let (elap, next_day_at) = {
+                let (elap, next_day_at, now) = {
                     let timing = self.col.timing_today()?;
-                    (timing.days_elapsed, timing.next_day_at)
+                    (timing.days_elapsed, timing.next_day_at, timing.now)
                 };
                 write!(
                     self.sql,
-                    "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}) {op} {r}"
+                    "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}, {now}) {op} {r}"
                 )
                 .unwrap()
             }
|
@ -567,16 +568,18 @@ impl SqlWriter<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_field(&mut self, field_name: &str, val: &str, is_re: bool) -> Result<()> {
|
fn write_field(&mut self, field_name: &str, val: &str, mode: FieldSearchMode) -> Result<()> {
|
||||||
if matches!(field_name, "*" | "_*" | "*_") {
|
if matches!(field_name, "*" | "_*" | "*_") {
|
||||||
if is_re {
|
if mode == FieldSearchMode::Regex {
|
||||||
self.write_all_fields_regexp(val);
|
self.write_all_fields_regexp(val);
|
||||||
} else {
|
} else {
|
||||||
self.write_all_fields(val);
|
self.write_all_fields(val);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
} else if is_re {
|
} else if mode == FieldSearchMode::Regex {
|
||||||
self.write_single_field_regexp(field_name, val)
|
self.write_single_field_regexp(field_name, val)
|
||||||
|
} else if mode == FieldSearchMode::NoCombining {
|
||||||
|
self.write_single_field_nc(field_name, val)
|
||||||
} else {
|
} else {
|
||||||
self.write_single_field(field_name, val)
|
self.write_single_field(field_name, val)
|
||||||
}
|
}
|
||||||
|
@@ -592,6 +595,58 @@ impl SqlWriter<'_> {
         write!(self.sql, "regexp_fields(?{}, n.flds)", self.args.len()).unwrap();
     }
 
+    fn write_single_field_nc(&mut self, field_name: &str, val: &str) -> Result<()> {
+        let field_indicies_by_notetype = self.num_fields_and_fields_indices_by_notetype(
+            field_name,
+            matches!(val, "*" | "_*" | "*_"),
+        )?;
+        if field_indicies_by_notetype.is_empty() {
+            write!(self.sql, "false").unwrap();
+            return Ok(());
+        }
+
+        let val = to_sql(val);
+        let val = without_combining(&val);
+        self.args.push(val.into());
+        let arg_idx = self.args.len();
+        let field_idx_str = format!("' || ?{arg_idx} || '");
+        let other_idx_str = "%".to_string();
+
+        let notetype_clause = |ctx: &FieldQualifiedSearchContext| -> String {
+            let field_index_clause = |range: &Range<u32>| {
+                let f = (0..ctx.total_fields_in_note)
+                    .filter_map(|i| {
+                        if i as u32 == range.start {
+                            Some(&field_idx_str)
+                        } else if range.contains(&(i as u32)) {
+                            None
+                        } else {
+                            Some(&other_idx_str)
+                        }
+                    })
+                    .join("\x1f");
+                format!(
+                    "coalesce(process_text(n.flds, {}), n.flds) like '{f}' escape '\\'",
+                    ProcessTextFlags::NoCombining.bits()
+                )
+            };
+
+            let all_field_clauses = ctx
+                .field_ranges_to_search
+                .iter()
+                .map(field_index_clause)
+                .join(" or ");
+            format!("(n.mid = {mid} and ({all_field_clauses}))", mid = ctx.ntid)
+        };
+        let all_notetype_clauses = field_indicies_by_notetype
+            .iter()
+            .map(notetype_clause)
+            .join(" or ");
+        write!(self.sql, "({all_notetype_clauses})").unwrap();
+
+        Ok(())
+    }
+
     fn write_single_field_regexp(&mut self, field_name: &str, val: &str) -> Result<()> {
         let field_indicies_by_notetype = self.fields_indices_by_notetype(field_name)?;
         if field_indicies_by_notetype.is_empty() {
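`write_single_field_nc` matches by field position inside `n.flds` (fields are joined with the 0x1f separator, hence the `\x1f`-joined LIKE pattern) and compares accent-stripped text on both sides: the needle goes through `without_combining`, the stored text through the SQL `process_text` function with the NoCombining flag. A minimal sketch of the accent-stripping idea, assuming the unicode-normalization crate; the real helper in rslib's text module may differ:

use unicode_normalization::char::is_combining_mark;
use unicode_normalization::UnicodeNormalization;

// NFKD-decompose, then drop combining marks, so "frânçais" matches "francais".
fn without_combining(s: &str) -> String {
    s.nfkd().filter(|c| !is_combining_mark(*c)).collect()
}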
@@ -1116,6 +1171,20 @@ mod test {
                 vec!["(?i)te.*st".into()]
             )
         );
+        // field search with no-combine
+        assert_eq!(
+            s(ctx, "front:nc:frânçais"),
+            (
+                concat!(
+                    "(((n.mid = 1581236385344 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
+                    "(n.mid = 1581236385345 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%\u{1f}%' escape '\\')) or ",
+                    "(n.mid = 1581236385346 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
+                    "(n.mid = 1581236385347 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\'))))"
+                )
+                .into(),
+                vec!["francais".into()]
+            )
+        );
         // all field search
         assert_eq!(
             s(ctx, "*:te*st"),
@@ -9,6 +9,7 @@ use regex::Regex;
 use crate::notetype::NotetypeId as NotetypeIdType;
 use crate::prelude::*;
 use crate::search::parser::parse;
+use crate::search::parser::FieldSearchMode;
 use crate::search::parser::Node;
 use crate::search::parser::PropertyKind;
 use crate::search::parser::RatingKind;
|
@ -69,7 +70,7 @@ fn write_search_node(node: &SearchNode) -> String {
|
||||||
use SearchNode::*;
|
use SearchNode::*;
|
||||||
match node {
|
match node {
|
||||||
UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")),
|
UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")),
|
||||||
SingleField { field, text, is_re } => write_single_field(field, text, *is_re),
|
SingleField { field, text, mode } => write_single_field(field, text, *mode),
|
||||||
AddedInDays(u) => format!("added:{u}"),
|
AddedInDays(u) => format!("added:{u}"),
|
||||||
EditedInDays(u) => format!("edited:{u}"),
|
EditedInDays(u) => format!("edited:{u}"),
|
||||||
IntroducedInDays(u) => format!("introduced:{u}"),
|
IntroducedInDays(u) => format!("introduced:{u}"),
|
||||||
|
@@ -81,7 +82,7 @@ fn write_search_node(node: &SearchNode) -> String {
         NotetypeId(NotetypeIdType(i)) => format!("mid:{i}"),
         Notetype(s) => maybe_quote(&format!("note:{s}")),
         Rated { days, ease } => write_rated(days, ease),
-        Tag { tag, is_re } => write_single_field("tag", tag, *is_re),
+        Tag { tag, mode } => write_single_field("tag", tag, *mode),
         Duplicates { notetype_id, text } => write_dupe(notetype_id, text),
         State(k) => write_state(k),
         Flag(u) => format!("flag:{u}"),
@@ -116,14 +117,25 @@ fn needs_quotation(txt: &str) -> bool {
 }
 
 /// Also used by tag search, which has the same syntax.
-fn write_single_field(field: &str, text: &str, is_re: bool) -> String {
-    let re = if is_re { "re:" } else { "" };
-    let text = if !is_re && text.starts_with("re:") {
+fn write_single_field(field: &str, text: &str, mode: FieldSearchMode) -> String {
+    let prefix = match mode {
+        FieldSearchMode::Normal => "",
+        FieldSearchMode::Regex => "re:",
+        FieldSearchMode::NoCombining => "nc:",
+    };
+    let text = if mode == FieldSearchMode::Normal
+        && (text.starts_with("re:") || text.starts_with("nc:"))
+    {
         text.replacen(':', "\\:", 1)
     } else {
         text.to_string()
     };
-    maybe_quote(&format!("{}:{}{}", field.replace(':', "\\:"), re, &text))
+    maybe_quote(&format!(
+        "{}:{}{}",
+        field.replace(':', "\\:"),
+        prefix,
+        &text
+    ))
 }
 
 fn write_template(template: &TemplateKind) -> String {
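A quick round-trip check of the writer change, with expected strings inferred from the code above (hedged: `maybe_quote` may still add quotes for some inputs):

fn round_trip_examples() {
    assert_eq!(
        write_single_field("front", "frânçais", FieldSearchMode::NoCombining),
        "front:nc:frânçais"
    );
    // A literal leading "nc:" in Normal mode gets its colon escaped, so it
    // won't re-parse as a mode prefix:
    assert_eq!(
        write_single_field("front", "nc:bar", FieldSearchMode::Normal),
        r"front:nc\:bar"
    );
}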
@@ -30,14 +30,24 @@ impl Collection {
 
         let (average_secs, total_secs) = average_and_total_secs_strings(&revlog);
         let timing = self.timing_today()?;
-        let seconds_elapsed = if let Some(last_review_time) = card.last_review_time {
-            timing.now.elapsed_secs_since(last_review_time) as u32
+        let last_review_time = if let Some(last_review_time) = card.last_review_time {
+            last_review_time
         } else {
-            self.storage
+            let mut new_card = card.clone();
+            let last_review_time = self
+                .storage
                 .time_of_last_review(card.id)?
-                .map(|ts| timing.now.elapsed_secs_since(ts))
-                .unwrap_or_default() as u32
+                .unwrap_or_default();
+            new_card.last_review_time = Some(last_review_time);
+
+            self.storage.update_card(&new_card)?;
+            last_review_time
         };
 
+        let seconds_elapsed = timing.now.elapsed_secs_since(last_review_time) as u32;
+
         let fsrs_retrievability = card
             .memory_state
             .zip(Some(seconds_elapsed))
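The card-stats change above is a backfill-on-read: when `card.last_review_time` is missing, it is recovered from the revlog once, written back via `update_card`, and reused on later reads instead of being recomputed. The same pattern in compact form (a sketch using names from the diff above, inside the stats method):

// Backfill-on-read: look the value up once, then cache it on the card.
let last_review_time = match card.last_review_time {
    Some(t) => t,
    None => {
        let t = self.storage.time_of_last_review(card.id)?.unwrap_or_default();
        let mut new_card = card.clone();
        new_card.last_review_time = Some(t); // cache for the next stats call
        self.storage.update_card(&new_card)?;
        t
    }
};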
@@ -187,7 +197,7 @@ impl Collection {
 }
 
 fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) {
-    let normal_answer_count = revlog.iter().filter(|r| r.button_chosen > 0).count();
+    let normal_answer_count = revlog.iter().filter(|r| r.has_rating()).count();
     let total_secs: f32 = revlog
         .iter()
         .map(|entry| (entry.taken_millis as f32) / 1000.0)
@@ -53,10 +53,7 @@ impl GraphsContext {
         self.revlog
             .iter()
             .filter(|review| {
-                // not rescheduled/set due date/reset
-                review.button_chosen > 0
-                // not cramming
-                && (review.review_kind != RevlogReviewKind::Filtered || review.ease_factor != 0)
+                review.has_rating_and_affects_scheduling()
                 // cards with an interval ≥ 1 day
                 && (review.review_kind == RevlogReviewKind::Review
                     || review.last_interval <= -86400
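`has_rating_and_affects_scheduling()` gives a name to the two conditions it replaces, and `has_rating()` (used in the stats hunk above) replaces the bare `button_chosen > 0` check. A hedged sketch of what the helpers presumably encapsulate, reconstructed from the replaced conditions rather than from the actual implementation:

impl RevlogEntry {
    /// A review button was pressed (not rescheduled/set due date/reset).
    fn has_rating(&self) -> bool {
        self.button_chosen > 0
    }

    /// Rated, and not a cram review in a filtered deck (cramming leaves
    /// ease_factor at 0 and does not affect scheduling).
    fn has_rating_and_affects_scheduling(&self) -> bool {
        self.has_rating()
            && (self.review_kind != RevlogReviewKind::Filtered || self.ease_factor != 0)
    }
}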
Some files were not shown because too many files have changed in this diff.