Compare commits

No commits in common. "main" and "25.08b1" have entirely different histories.

166 changed files with 5855 additions and 6686 deletions

View file

@ -1 +1 @@
25.09.2
25.08b1

View file

@ -1,2 +1 @@
nodeLinker: node-modules
enableScripts: false

View file

@ -49,7 +49,6 @@ Sander Santema <github.com/sandersantema/>
Thomas Brownback <https://github.com/brownbat/>
Andrew Gaul <andrew@gaul.org>
kenden
Emil Hamrin <github.com/e-hamrin>
Nickolay Yudin <kelciour@gmail.com>
neitrinoweb <github.com/neitrinoweb/>
Andreas Reis <github.com/nwwt>
@ -235,14 +234,6 @@ Emmanuel Ferdman <https://github.com/emmanuel-ferdman>
Sunong2008 <https://github.com/Sunrongguo2008>
Marvin Kopf <marvinkopf@outlook.com>
Kevin Nakamura <grinkers@grinkers.net>
Bradley Szoke <bradleyszoke@gmail.com>
jcznk <https://github.com/jcznk>
Thomas Rixen <thomas.rixen@student.uclouvain.be>
Siyuan Mattuwu Yan <syan4@ualberta.ca>
Lee Doughty <32392044+leedoughty@users.noreply.github.com>
memchr <memchr@proton.me>
Max Romanowski <maxr777@proton.me>
Aldlss <ayaldlss@gmail.com>
********************

Cargo.lock (generated)

File diff suppressed because it is too large.

View file

@ -33,8 +33,9 @@ git = "https://github.com/ankitects/linkcheck.git"
rev = "184b2ca50ed39ca43da13f0b830a463861adb9ca"
[workspace.dependencies.fsrs]
version = "5.1.0"
version = "4.1.1"
# git = "https://github.com/open-spaced-repetition/fsrs-rs.git"
# rev = "a7f7efc10f0a26b14ee348cc7402155685f2a24f"
# path = "../open-spaced-repetition/fsrs-rs"
[workspace.dependencies]
@ -109,7 +110,6 @@ prost-types = "0.13"
pulldown-cmark = "0.13.0"
pyo3 = { version = "0.25.1", features = ["extension-module", "abi3", "abi3-py39"] }
rand = "0.9.1"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false, features = ["json", "socks", "stream", "multipart"] }
rusqlite = { version = "0.36.0", features = ["trace", "functions", "collation", "bundled"] }
@ -133,7 +133,7 @@ tokio-util = { version = "0.7.15", features = ["io"] }
tower-http = { version = "0.6.6", features = ["trace"] }
tracing = { version = "0.1.41", features = ["max_level_trace", "release_max_level_debug"] }
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] }
tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] }
unic-langid = { version = "0.9.6", features = ["macros"] }
unic-ucd-category = "0.9.0"
unicode-normalization = "0.1.24"
@ -141,7 +141,7 @@ walkdir = "2.5.0"
which = "8.0.0"
widestring = "1.1.0"
winapi = { version = "0.3", features = ["wincon", "winreg"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_Foundation", "Win32_UI_Shell", "Wdk_System_SystemServices"] }
windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams", "Win32_System_Console", "Win32_System_Registry", "Win32_Foundation", "Win32_UI_Shell"] }
wiremock = "0.6.3"
xz2 = "0.1.7"
zip = { version = "4.1.0", default-features = false, features = ["deflate", "time"] }

View file

@ -169,7 +169,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> {
pub fn check_rust(build: &mut Build) -> Result<()> {
let inputs = inputs![
glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**,tools/minilints/**}"),
glob!("{rslib/**,pylib/rsbridge/**,ftl/**,build/**,qt/launcher/**}"),
"Cargo.lock",
"Cargo.toml",
"rust-toolchain.toml",

View file

@ -49,46 +49,6 @@ pub trait BuildAction {
}
fn name(&self) -> &'static str {
std::any::type_name::<Self>()
.split("::")
.last()
.unwrap()
.split('<')
.next()
.unwrap()
std::any::type_name::<Self>().split("::").last().unwrap()
}
}
#[cfg(test)]
trait TestBuildAction {}
#[cfg(test)]
impl<T: TestBuildAction + ?Sized> BuildAction for T {
fn command(&self) -> &str {
"test"
}
fn files(&mut self, _build: &mut impl FilesHandle) {}
}
#[allow(dead_code, unused_variables)]
#[test]
fn should_strip_regions_in_type_name() {
struct Bare;
impl TestBuildAction for Bare {}
assert_eq!(Bare {}.name(), "Bare");
struct WithLifeTime<'a>(&'a str);
impl TestBuildAction for WithLifeTime<'_> {}
assert_eq!(WithLifeTime("test").name(), "WithLifeTime");
struct WithMultiLifeTime<'a, 'b>(&'a str, &'b str);
impl TestBuildAction for WithMultiLifeTime<'_, '_> {}
assert_eq!(
WithMultiLifeTime("test", "test").name(),
"WithMultiLifeTime"
);
struct WithGeneric<T>(T);
impl<T> TestBuildAction for WithGeneric<T> {}
assert_eq!(WithGeneric(3).name(), "WithGeneric");
}

View file

@ -67,7 +67,7 @@ impl Platform {
}
/// Append .exe to path if on Windows.
pub fn with_exe(path: &str) -> Cow<'_, str> {
pub fn with_exe(path: &str) -> Cow<str> {
if cfg!(windows) {
format!("{path}.exe").into()
} else {

View file

@ -98,7 +98,7 @@ impl BuildAction for YarnInstall<'_> {
}
}
fn with_cmd_ext(bin: &str) -> Cow<'_, str> {
fn with_cmd_ext(bin: &str) -> Cow<str> {
if cfg!(windows) {
format!("{bin}.cmd").into()
} else {

View file

@ -32,19 +32,10 @@ pub fn setup_pyenv(args: PyenvArgs) {
}
}
let mut command = Command::new(args.uv_bin);
// remove UV_* environment variables to avoid interference
for (key, _) in std::env::vars() {
if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
command.env_remove(key);
}
}
run_command(
command
Command::new(args.uv_bin)
.env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
.args(["sync", "--locked", "--no-config"])
.args(["sync", "--locked"])
.args(args.extra_args),
);

View file

@ -28,11 +28,7 @@ pub fn setup_yarn(args: YarnArgs) {
.arg("--ignore-scripts"),
);
} else {
run_command(
Command::new(&args.yarn_bin)
.arg("install")
.arg("--immutable"),
);
run_command(Command::new(&args.yarn_bin).arg("install"));
}
std::fs::write(args.stamp, b"").unwrap();

File diff suppressed because it is too large.

View file

@ -1,78 +1,35 @@
# This is a user-contributed Dockerfile. No official support is available.
# This Dockerfile uses three stages.
# 1. Compile anki (and dependencies) and build python wheels.
# 2. Create a virtual environment containing anki and its dependencies.
# 3. Create a final image that only includes anki's virtual environment and required
# system packages.
ARG PYTHON_VERSION="3.9"
ARG DEBIAN_FRONTEND="noninteractive"
FROM ubuntu:24.04 AS build
# Build anki.
FROM python:$PYTHON_VERSION AS build
RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
> /usr/local/bin/bazel \
&& chmod +x /usr/local/bin/bazel \
# Bazel expects /usr/bin/python
&& ln -s /usr/local/bin/python /usr/bin/python
WORKDIR /opt/anki
ENV PYTHON_VERSION="3.13"
# System deps
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
git \
build-essential \
pkg-config \
libssl-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
libffi-dev \
zlib1g-dev \
liblzma-dev \
ca-certificates \
ninja-build \
rsync \
libglib2.0-0 \
libgl1 \
libx11-6 \
libxext6 \
libxrender1 \
libxkbcommon0 \
libxkbcommon-x11-0 \
libxcb1 \
libxcb-render0 \
libxcb-shm0 \
libxcb-icccm4 \
libxcb-image0 \
libxcb-keysyms1 \
libxcb-randr0 \
libxcb-shape0 \
libxcb-xfixes0 \
libxcb-xinerama0 \
libxcb-xinput0 \
libsm6 \
libice6 \
&& rm -rf /var/lib/apt/lists/*
# install rust with rustup
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
# Install uv and Python 3.13 with uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
&& ln -s /root/.local/bin/uv /usr/local/bin/uv
ENV PATH="/root/.local/bin:${PATH}"
RUN uv python install ${PYTHON_VERSION} --default
COPY . .
# Build python wheels.
RUN ./tools/build
# Install pre-compiled Anki.
FROM python:3.13-slim AS installer
FROM python:${PYTHON_VERSION}-slim as installer
WORKDIR /opt/anki/
COPY --from=build /opt/anki/out/wheels/ wheels/
COPY --from=build /opt/anki/wheels/ wheels/
# Use virtual environment.
RUN python -m venv venv \
&& ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
&& ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl
# We use another build stage here so we don't include the wheels in the final image.
FROM python:3.13-slim AS final
FROM python:${PYTHON_VERSION}-slim as final
COPY --from=installer /opt/anki/venv /opt/anki/venv
ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies.
@ -102,9 +59,9 @@ RUN apt-get update \
libxrender1 \
libxtst6 \
&& rm -rf /var/lib/apt/lists/*
# Add non-root user.
RUN useradd --create-home anki
USER anki
WORKDIR /work
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"

@ -1 +1 @@
Subproject commit 480ef0da728c7ea3485c58529ae7ee02be3e5dba
Subproject commit 3d04bcbf7fefca0007bc9db307409d88210995d8

View file

@ -5,11 +5,6 @@ database-check-card-properties =
[one] Fixed { $count } invalid card property.
*[other] Fixed { $count } invalid card properties.
}
database-check-card-last-review-time-empty =
{ $count ->
[one] Added last review time to { $count } card.
*[other] Added last review time to { $count } cards.
}
database-check-missing-templates =
{ $count ->
[one] Deleted { $count } card with missing template.

View file

@ -384,6 +384,8 @@ deck-config-which-deck = Which deck would you like to display options for?
deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }...
deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters.
deck-config-not-enough-history = Insufficient review history to perform this operation.
deck-config-unable-to-determine-desired-retention =
Unable to determine a minimum recommended retention.
deck-config-must-have-400-reviews =
{ $count ->
[one] Only { $count } review was found.
@ -392,6 +394,7 @@ deck-config-must-have-400-reviews =
# Numbers that control how aggressively the FSRS algorithm schedules cards
deck-config-weights = FSRS parameters
deck-config-compute-optimal-weights = Optimize FSRS parameters
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-optimize-button = Optimize Current Preset
# Indicates that a given function or label, provided via the "text" variable, operates slowly.
deck-config-slow-suffix = { $text } (slow)
@ -404,6 +407,7 @@ deck-config-historical-retention = Historical retention
deck-config-smaller-is-better = Smaller numbers indicate a better fit to your review history.
deck-config-steps-too-large-for-fsrs = When FSRS is enabled, steps of 1 day or more are not recommended.
deck-config-get-params = Get Params
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-complete = { $num }% complete.
deck-config-iterations = Iteration: { $count }...
deck-config-reschedule-cards-on-change = Reschedule cards on change
@ -464,7 +468,12 @@ deck-config-compute-optimal-weights-tooltip2 =
By default, parameters will be calculated from the review history of all decks using the current preset. You can
optionally adjust the search before calculating the parameters, if you'd like to alter which cards are used for
optimizing the parameters.
deck-config-compute-optimal-retention-tooltip4 =
This tool will attempt to find the desired retention value
that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you're
willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-please-save-your-changes-first = Please save your changes first.
deck-config-workload-factor-change = Approximate workload: {$factor}x
(compared to {$previousDR}% desired retention)
@ -496,10 +505,7 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
# Description of the y axis in the FSRS simulation
# diagram (Deck options -> FSRS) showing the total number of
# cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-fsrs-simulator-experimental = FSRS simulator (experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate
deck-config-clear-last-simulate = Clear Last Simulation
@ -513,9 +519,6 @@ deck-config-save-options-to-preset-confirm = Overwrite the options in your curre
# to show the total number of cards that can be recalled or retrieved on a
# specific date.
deck-config-fsrs-simulator-radio-memorized = Memorized
deck-config-fsrs-simulator-radio-ratio = Time / Memorized Ratio
# $time here is pre-formatted e.g. "10 Seconds"
deck-config-fsrs-simulator-ratio-tooltip = { $time } per memorized card
## Messages related to the FSRS scheduler's health check. The health check determines whether the correlation between FSRS predictions and your memory is good or bad. It can be optionally triggered as part of the "Optimize" function.
@ -525,7 +528,7 @@ deck-config-health-check = Check health when optimizing
deck-config-fsrs-bad-fit-warning = Health Check:
Your memory is difficult for FSRS to predict. Recommendations:
- Suspend or reformulate any cards you constantly forget.
- Suspend or reformulate leeches.
- Use the answer buttons consistently. Keep in mind that "Hard" is a passing grade, not a failing grade.
- Understand before you memorize.
@ -536,17 +539,6 @@ deck-config-fsrs-good-fit = Health Check:
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
deck-config-unable-to-determine-desired-retention =
Unable to determine a minimum recommended retention.
deck-config-predicted-minimum-recommended-retention = Minimum recommended retention: { $num }
deck-config-compute-minimum-recommended-retention = Minimum recommended retention
deck-config-compute-optimal-retention-tooltip4 =
This tool will attempt to find the desired retention value
that will lead to the most material learnt, in the least amount of time. The calculated number can serve as a reference
when deciding what to set your desired retention to. You may wish to choose a higher desired retention if you're
willing to invest more study time to achieve it. Setting your desired retention lower than the minimum
is not recommended, as it will lead to a higher workload, because of the high forgetting rate.
deck-config-plotted-on-x-axis = (Plotted on the X-axis)
deck-config-a-100-day-interval =
{ $days ->
[one] A 100 day interval will become { $days } day.

View file

@ -34,7 +34,7 @@ preferences-when-adding-default-to-current-deck = When adding, default to curren
preferences-you-can-restore-backups-via-fileswitch = You can restore backups via File > Switch Profile.
preferences-legacy-timezone-handling = Legacy timezone handling (buggy, but required for AnkiDroid <= 2.14)
preferences-default-search-text = Default search text
preferences-default-search-text-example = e.g. "deck:current"
preferences-default-search-text-example = eg. 'deck:current '
preferences-theme = Theme
preferences-theme-follow-system = Follow System
preferences-theme-light = Light

View file

@ -80,7 +80,7 @@ statistics-reviews =
# This fragment of the tooltip in the FSRS simulation
# diagram (Deck options -> FSRS) shows the total number of
# cards that can be recalled or retrieved on a specific date.
statistics-memorized = {$memorized} cards memorized
statistics-memorized = {$memorized} memorized
statistics-today-title = Today
statistics-today-again-count = Again count:
statistics-today-type-counts = Learn: { $learnCount }, Review: { $reviewCount }, Relearn: { $relearnCount }, Filtered: { $filteredCount }
@ -99,9 +99,9 @@ statistics-counts-relearning-cards = Relearning
statistics-counts-title = Card Counts
statistics-counts-separate-suspended-buried-cards = Separate suspended/buried cards
## Retention represents your actual retention from past reviews, in
## Retention rate represents your actual retention rate from past reviews, in
## comparison to the "desired retention" setting of FSRS, which forecasts
## future retention. Retention is the percentage of all reviewed cards
## future retention. Retention rate is the percentage of all reviewed cards
## that were marked as "Hard," "Good," or "Easy" within a specific time period.
##
## Most of these strings are used as column / row headings in a table.
@ -112,9 +112,9 @@ statistics-counts-separate-suspended-buried-cards = Separate suspended/buried ca
## N.B. Stats cards may be very small on mobile devices and when the Stats
## window is certain sizes.
statistics-true-retention-title = Retention
statistics-true-retention-title = Retention rate
statistics-true-retention-subtitle = Pass rate of cards with an interval ≥ 1 day.
statistics-true-retention-tooltip = If you are using FSRS, your retention is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-tooltip = If you are using FSRS, your retention rate is expected to be close to your desired retention. Please keep in mind that data for a single day is noisy, so it's better to look at monthly data.
statistics-true-retention-range = Range
statistics-true-retention-pass = Pass
statistics-true-retention-fail = Fail

View file

@ -46,20 +46,6 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
studying-unbury = Unbury
studying-what-would-you-like-to-unbury = What would you like to unbury?
studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
studying-card-studied-in-minute =
{ $cards ->
[one] { $cards } card
*[other] { $cards } cards
} studied in
{ $minutes ->
[one] { $minutes } minute.
*[other] { $minutes } minutes.
}
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed
## OBSOLETE; you do not need to translate this
studying-card-studied-in =
{ $count ->
[one] { $count } card studied in
@ -70,3 +56,5 @@ studying-minute =
[one] { $count } minute.
*[other] { $count } minutes.
}
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed

@ -1 +1 @@
Subproject commit fd5f984785ad07a0d3dbd893ee3d7e3671eaebd6
Subproject commit c65a9587b1f18931986bdf145872e8e4c44c5c82

View file

@ -82,7 +82,6 @@
"resolutions": {
"canvas": "npm:empty-npm-package@1.0.0",
"cookie": "0.7.0",
"devalue": "^5.3.2",
"vite": "6"
},
"browserslist": [

View file

@ -40,10 +40,12 @@ message DeckConfigId {
message GetRetentionWorkloadRequest {
repeated float w = 1;
string search = 2;
float before = 3;
float after = 4;
}
message GetRetentionWorkloadResponse {
map<uint32, float> costs = 1;
float factor = 1;
}
message GetIgnoredBeforeCountRequest {
@ -217,8 +219,6 @@ message DeckConfigsForUpdate {
bool review_today_active = 5;
// Whether new_today applies to today or a past day.
bool new_today_active = 6;
// Deck-specific desired retention override
optional float desired_retention = 7;
}
string name = 1;
int64 config_id = 2;

View file

@ -83,8 +83,6 @@ message Deck {
optional uint32 new_limit = 7;
DayLimit review_limit_today = 8;
DayLimit new_limit_today = 9;
// Deck-specific desired retention override
optional float desired_retention = 10;
reserved 12 to 15;
}

View file

@ -27,9 +27,6 @@ service FrontendService {
rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
// Warns python that the deck option web view is ready to receive requests.
rpc deckOptionsReady(generic.Empty) returns (generic.Empty);
// Save colour picker's custom colour palette
rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
}
service BackendFrontendService {}

View file

@ -59,7 +59,7 @@ message AddNoteRequest {
}
message AddNoteResponse {
collection.OpChangesWithCount changes = 1;
collection.OpChanges changes = 1;
int64 note_id = 2;
}

View file

@ -55,8 +55,6 @@ service SchedulerService {
returns (ComputeOptimalRetentionResponse);
rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
returns (SimulateFsrsReviewResponse);
rpc SimulateFsrsWorkload(SimulateFsrsReviewRequest)
returns (SimulateFsrsWorkloadResponse);
rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
returns (EvaluateParamsResponse);
@ -406,9 +404,6 @@ message SimulateFsrsReviewRequest {
repeated float easy_days_percentages = 10;
deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
optional uint32 suspend_after_lapse_count = 12;
float historical_retention = 13;
uint32 learning_step_count = 14;
uint32 relearning_step_count = 15;
}
message SimulateFsrsReviewResponse {
@ -418,12 +413,6 @@ message SimulateFsrsReviewResponse {
repeated float daily_time_cost = 4;
}
message SimulateFsrsWorkloadResponse {
map<uint32, float> cost = 1;
map<uint32, float> memorized = 2;
map<uint32, uint32> review_count = 3;
}
message ComputeOptimalRetentionResponse {
float optimal_retention = 1;
}

View file

@ -74,15 +74,10 @@ message SearchNode {
repeated SearchNode nodes = 1;
Joiner joiner = 2;
}
enum FieldSearchMode {
FIELD_SEARCH_MODE_NORMAL = 0;
FIELD_SEARCH_MODE_REGEX = 1;
FIELD_SEARCH_MODE_NOCOMBINING = 2;
}
message Field {
string field_name = 1;
string text = 2;
FieldSearchMode mode = 3;
bool is_re = 3;
}
oneof filter {

View file

@ -246,7 +246,7 @@ def backend_exception_to_pylib(err: backend_pb2.BackendError) -> Exception:
return BackendError(err.message, help_page, context, backtrace)
elif val == kind.SEARCH_ERROR:
return SearchError(err.message, help_page, context, backtrace)
return SearchError(markdown(err.message), help_page, context, backtrace)
elif val == kind.UNDO_EMPTY:
return UndoEmpty(err.message, help_page, context, backtrace)

View file

@ -133,7 +133,6 @@ class Card(DeprecatedNamesMixin):
memory_state=self.memory_state,
desired_retention=self.desired_retention,
decay=self.decay,
last_review_time_secs=self.last_review_time,
)
@deprecated(info="please use col.update_card()")

View file

@ -528,7 +528,7 @@ class Collection(DeprecatedNamesMixin):
def new_note(self, notetype: NotetypeDict) -> Note:
return Note(self, notetype)
def add_note(self, note: Note, deck_id: DeckId) -> OpChangesWithCount:
def add_note(self, note: Note, deck_id: DeckId) -> OpChanges:
hooks.note_will_be_added(self, note, deck_id)
out = self._backend.add_note(note=note._to_backend_note(), deck_id=deck_id)
note.id = NoteId(out.note_id)

View file

@ -175,8 +175,8 @@ class MnemoFact:
def fact_view(self) -> type[MnemoFactView]:
try:
fact_view = self.cards[0].fact_view_id
except IndexError:
return FrontOnly
except IndexError as err:
raise Exception(f"Fact {id} has no cards") from err
if fact_view.startswith("1.") or fact_view.startswith("1::"):
return FrontOnly
@ -187,7 +187,7 @@ class MnemoFact:
elif fact_view.startswith("5.1"):
return Cloze
raise Exception(f"Fact {self.id} has unknown fact view: {fact_view}")
raise Exception(f"Fact {id} has unknown fact view: {fact_view}")
def anki_fields(self, fact_view: type[MnemoFactView]) -> list[str]:
return [munge_field(self.fields.get(k, "")) for k in fact_view.field_keys]

View file

@ -73,7 +73,7 @@ langs = sorted(
("ଓଡ଼ିଆ", "or_OR"),
("Filipino", "tl"),
("ئۇيغۇر", "ug"),
("Oʻzbekcha", "uz_UZ"),
("Oʻzbek", "uz_UZ"),
]
)

View file

@ -7,7 +7,7 @@ dependencies = [
"decorator",
"markdown",
"orjson",
"protobuf>=6.0,<8.0",
"protobuf>=4.21",
"requests[socks]",
# remove after we update to min python 3.11+
"typing_extensions",

View file

@ -70,10 +70,10 @@ def show(mw: aqt.AnkiQt) -> QDialog:
abouttext += f"<p>{lede}"
abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}"
abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>"
abouttext += ("Python %s Qt %s Chromium %s<br>") % (
abouttext += ("Python %s Qt %s PyQt %s<br>") % (
platform.python_version(),
qVersion(),
(qWebEngineChromiumVersion() or "").split(".")[0],
PYQT_VERSION_STR,
)
abouttext += (
without_unicode_isolation(tr.about_visit_website(val=aqt.appWebsite))
@ -225,7 +225,6 @@ def show(mw: aqt.AnkiQt) -> QDialog:
"Adnane Taghi",
"Anon_0000",
"Bilolbek Normuminov",
"Sagiv Marzini",
)
)

View file

@ -8,7 +8,7 @@ from collections.abc import Callable
import aqt.editor
import aqt.forms
from anki._legacy import deprecated
from anki.collection import OpChanges, OpChangesWithCount, SearchNode
from anki.collection import OpChanges, SearchNode
from anki.decks import DeckId
from anki.models import NotetypeId
from anki.notes import Note, NoteFieldsCheckResult, NoteId
@ -294,13 +294,13 @@ class AddCards(QMainWindow):
target_deck_id = self.deck_chooser.selected_deck_id
def on_success(changes: OpChangesWithCount) -> None:
def on_success(changes: OpChanges) -> None:
# only used for detecting changed sticky fields on close
self._last_added_note = note
self.addHistory(note)
tooltip(tr.importing_cards_added(count=changes.count), period=500)
tooltip(tr.adding_added(), period=500)
av_player.stop_and_clear_queue()
self._load_new_note(sticky_fields_from=note)
gui_hooks.add_cards_did_add_note(note)

View file

@ -10,8 +10,6 @@ import re
from collections.abc import Callable, Sequence
from typing import Any, cast
from markdown import markdown
import aqt
import aqt.browser
import aqt.editor
@ -22,7 +20,7 @@ from anki.cards import Card, CardId
from anki.collection import Collection, Config, OpChanges, SearchNode
from anki.consts import *
from anki.decks import DeckId
from anki.errors import NotFoundError, SearchError
from anki.errors import NotFoundError
from anki.lang import without_unicode_isolation
from anki.models import NotetypeId
from anki.notes import NoteId
@ -500,8 +498,6 @@ class Browser(QMainWindow):
text = self.current_search()
try:
normed = self.col.build_search_string(text)
except SearchError as err:
showWarning(markdown(str(err)))
except Exception as err:
showWarning(str(err))
else:

View file

@ -51,7 +51,6 @@ class CardInfoDialog(QDialog):
def _setup_ui(self, card_id: CardId | None) -> None:
self.mw.garbage_collect_on_dialog_finish(self)
self.setMinimumSize(400, 300)
disable_help_button(self)
restoreGeom(self, self.GEOMETRY_KEY, default_size=(800, 800))
add_close_shortcut(self)

View file

@ -13,7 +13,7 @@ import aqt.browser
from anki.cards import Card
from anki.collection import Config
from anki.tags import MARKED_TAG
from aqt import AnkiQt, gui_hooks, is_mac
from aqt import AnkiQt, gui_hooks
from aqt.qt import (
QCheckBox,
QDialog,
@ -81,15 +81,10 @@ class Previewer(QDialog):
qconnect(self.finished, self._on_finished)
self.silentlyClose = True
self.vbox = QVBoxLayout()
spacing = 6
self.vbox.setContentsMargins(0, 0, 0, 0)
self.vbox.setSpacing(spacing)
self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER)
self.vbox.addWidget(self._web)
self.bbox = QDialogButtonBox()
self.bbox.setContentsMargins(
spacing, spacing if is_mac else 0, spacing, spacing
)
self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight)
gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER)

Binary file not shown (after: 727 B).

View file

@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="21" height="21" viewBox="0 0 21 21" version="1.1"
xmlns="http://www.w3.org/2000/svg">
<g id="Layer-1" transform="translate(0.5,0.5)">
<rect x="0" y="0" width="20" height="20" fill="none"/>
<g transform="translate(14.8974,6.3648)">
<path d="M0,0C0,3.403 -2.042,6.161 -4.56,6.161C-7.078,6.161 -9.12,3.403 -9.12,0C-9.12,-3.403 -7.078,-6.161 -4.56,-6.161C-2.042,-6.161 0,-3.403 0,0"
fill="black" fill-rule="nonzero"/>
</g>
<g transform="matrix(0,-1,-1,0,10.3374,1.8048)">
<ellipse cx="-4.56" cy="0" rx="6.161" ry="4.56"
fill="none" stroke="black" stroke-width="0.25"/>
</g>
<g transform="translate(3.1987,14.4958)">
<path d="M0,-9.484C-0.76,-4.212 3.287,0 7.12,-0.046C10.864,-0.09 14.742,-4.199 14.076,-9.343"
fill="none" stroke="black" stroke-width="2" fill-rule="nonzero"/>
</g>
<g transform="matrix(-1,0,0,1,20.573,18.613)">
<rect x="5.387" y="0.601" width="9.799" height="0.185"
fill="none" stroke="black" stroke-width="2"/>
</g>
<g transform="matrix(-1,0,0,1,20.741,13.51)">
<rect x="9.899" y="1.163" width="0.943" height="4.164"
fill="none" stroke="black" stroke-width="2"/>
</g>
</g>
</svg>


View file

@ -151,7 +151,6 @@ class Editor:
self.add_webview()
self.setupWeb()
self.setupShortcuts()
self.setupColourPalette()
gui_hooks.editor_did_init(self)
# Initial setup
@ -350,14 +349,6 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too
keys, fn, _ = row
QShortcut(QKeySequence(keys), self.widget, activated=fn) # type: ignore
def setupColourPalette(self) -> None:
if not (colors := self.mw.col.get_config("customColorPickerPalette")):
return
for i, colour in enumerate(colors[: QColorDialog.customCount()]):
if not QColor.isValidColorName(colour):
continue
QColorDialog.setCustomColor(i, QColor.fromString(colour))
def _addFocusCheck(self, fn: Callable) -> Callable:
def checkFocus() -> None:
if self.currentField is None:

View file

@ -1292,10 +1292,9 @@
<tabstop>daily_backups</tabstop>
<tabstop>weekly_backups</tabstop>
<tabstop>monthly_backups</tabstop>
<tabstop>tabWidget</tabstop>
<tabstop>syncAnkiHubLogout</tabstop>
<tabstop>syncAnkiHubLogin</tabstop>
<tabstop>buttonBox</tabstop>
<tabstop>tabWidget</tabstop>
</tabstops>
<resources/>
<connections>

View file

@ -483,7 +483,7 @@ def update_deck_configs() -> bytes:
update.abort = True
def on_success(changes: OpChanges) -> None:
if isinstance(window := aqt.mw.app.activeModalWidget(), DeckOptionsDialog):
if isinstance(window := aqt.mw.app.activeWindow(), DeckOptionsDialog):
window.reject()
def handle_on_main() -> None:
@ -511,7 +511,7 @@ def set_scheduling_states() -> bytes:
def import_done() -> bytes:
def update_window_modality() -> None:
if window := aqt.mw.app.activeModalWidget():
if window := aqt.mw.app.activeWindow():
from aqt.import_export.import_dialog import ImportDialog
if isinstance(window, ImportDialog):
@ -529,7 +529,7 @@ def import_request(endpoint: str) -> bytes:
response.ParseFromString(output)
def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget()
window = aqt.mw.app.activeWindow()
on_op_finished(aqt.mw, response, window)
aqt.mw.taskman.run_on_main(handle_on_main)
@ -569,7 +569,7 @@ def change_notetype() -> bytes:
data = request.data
def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget()
window = aqt.mw.app.activeWindow()
if isinstance(window, ChangeNotetypeDialog):
window.save(data)
@ -579,7 +579,7 @@ def change_notetype() -> bytes:
def deck_options_require_close() -> bytes:
def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget()
window = aqt.mw.app.activeWindow()
if isinstance(window, DeckOptionsDialog):
window.require_close()
@ -591,7 +591,7 @@ def deck_options_require_close() -> bytes:
def deck_options_ready() -> bytes:
def handle_on_main() -> None:
window = aqt.mw.app.activeModalWidget()
window = aqt.mw.app.activeWindow()
if isinstance(window, DeckOptionsDialog):
window.set_ready()
@ -599,15 +599,6 @@ def deck_options_ready() -> bytes:
return b""
def save_custom_colours() -> bytes:
colors = [
QColorDialog.customColor(i).name(QColor.NameFormat.HexArgb)
for i in range(QColorDialog.customCount())
]
aqt.mw.col.set_config("customColorPickerPalette", colors)
return b""
post_handler_list = [
congrats_info,
get_deck_configs_for_update,
@ -623,7 +614,6 @@ post_handler_list = [
search_in_browser,
deck_options_require_close,
deck_options_ready,
save_custom_colours,
]
@ -664,7 +654,6 @@ exposed_backend_list = [
"evaluate_params_legacy",
"get_optimal_retention_parameters",
"simulate_fsrs_review",
"simulate_fsrs_workload",
# DeckConfigService
"get_ignored_before_count",
"get_retention_workload",

View file

@ -18,7 +18,7 @@ def add_note(
parent: QWidget,
note: Note,
target_deck_id: DeckId,
) -> CollectionOp[OpChangesWithCount]:
) -> CollectionOp[OpChanges]:
return CollectionOp(parent, lambda col: col.add_note(note, target_deck_id))

View file

@ -124,14 +124,17 @@ def launcher_executable() -> str | None:
def trigger_launcher_run() -> None:
"""Create a trigger file to request launcher UI on next run."""
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
try:
root = launcher_root()
if not root:
return
trigger_path = Path(root) / ".want-launcher"
trigger_path.touch()
pyproject_path = Path(root) / "pyproject.toml"
if pyproject_path.exists():
# Touch the file to update its mtime
pyproject_path.touch()
except Exception as e:
print(e)
@ -147,25 +150,17 @@ def update_and_restart() -> None:
with contextlib.suppress(ResourceWarning):
env = os.environ.copy()
env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
# fixes a bug where launcher fails to appear if opening it
# straight after updating
if "GNOME_TERMINAL_SCREEN" in env:
del env["GNOME_TERMINAL_SCREEN"]
creationflags = 0
if sys.platform == "win32":
creationflags = (
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
)
# On Windows 10, changing the handles breaks ANSI display
io = None if sys.platform == "win32" else subprocess.DEVNULL
subprocess.Popen(
[launcher],
start_new_session=True,
stdin=io,
stdout=io,
stderr=io,
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
env=env,
creationflags=creationflags,
)

View file

@ -82,14 +82,11 @@ class Preferences(QDialog):
)
group = self.form.preferences_answer_keys
group.setLayout(layout := QFormLayout())
tab_widget: QWidget = self.form.url_schemes
for ease, label in ease_labels:
layout.addRow(
label,
line_edit := QLineEdit(self.mw.pm.get_answer_key(ease) or ""),
)
QWidget.setTabOrder(tab_widget, line_edit)
tab_widget = line_edit
qconnect(
line_edit.textChanged,
functools.partial(self.mw.pm.set_answer_key, ease),

View file

@ -17,7 +17,6 @@ import aqt.browser
import aqt.operations
from anki.cards import Card, CardId
from anki.collection import Config, OpChanges, OpChangesWithCount
from anki.lang import with_collapsed_whitespace
from anki.scheduler.base import ScheduleCardsAsNew
from anki.scheduler.v3 import (
CardAnswer,
@ -967,15 +966,11 @@ timerStopped = false;
elapsed = self.mw.col.timeboxReached()
if elapsed:
assert not isinstance(elapsed, bool)
cards_val = elapsed[1]
minutes_val = int(round(elapsed[0] / 60))
message = with_collapsed_whitespace(
tr.studying_card_studied_in_minute(
cards=cards_val, minutes=str(minutes_val)
)
)
part1 = tr.studying_card_studied_in(count=elapsed[1])
mins = int(round(elapsed[0] / 60))
part2 = tr.studying_minute(count=mins)
fin = tr.studying_finish()
diag = askUserDialog(message, [tr.studying_continue(), fin])
diag = askUserDialog(f"{part1} {part2}", [tr.studying_continue(), fin])
diag.setIcon(QMessageBox.Icon.Information)
if diag.run() == fin:
self.mw.moveToState("deckBrowser")

View file

@ -32,7 +32,6 @@ from aqt._macos_helper import macos_helper
from aqt.mpv import MPV, MPVBase, MPVCommandError
from aqt.qt import *
from aqt.taskman import TaskManager
from aqt.theme import theme_manager
from aqt.utils import (
disable_help_button,
restoreGeom,
@ -631,44 +630,18 @@ class QtAudioInputRecorder(Recorder):
self.mw = mw
self._parent = parent
from PyQt6.QtMultimedia import QAudioSource, QMediaDevices # type: ignore
from PyQt6.QtMultimedia import QAudioFormat, QAudioSource # type: ignore
# Get the default audio input device
device = QMediaDevices.defaultAudioInput()
format = QAudioFormat()
format.setChannelCount(1)
format.setSampleRate(44100)
format.setSampleFormat(QAudioFormat.SampleFormat.Int16)
# Try to use Int16 format first (avoids conversion)
preferred_format = device.preferredFormat()
int16_format = preferred_format
int16_format.setSampleFormat(preferred_format.SampleFormat.Int16)
source = QAudioSource(format, parent)
if device.isFormatSupported(int16_format):
# Use Int16 if supported
format = int16_format
else:
# Fall back to device's preferred format
format = preferred_format
# Create the audio source with the chosen format
source = QAudioSource(device, format, parent)
# Store the actual format being used
self._format = source.format()
self._audio_input = source
def _convert_float_to_int16(self, float_buffer: bytearray) -> bytes:
"""Convert float32 audio samples to int16 format for WAV output."""
import struct
float_count = len(float_buffer) // 4 # 4 bytes per float32
floats = struct.unpack(f"{float_count}f", float_buffer)
# Convert to int16 range, clipping and scaling in one step
int16_samples = [
max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats
]
return struct.pack(f"{len(int16_samples)}h", *int16_samples)
def start(self, on_done: Callable[[], None]) -> None:
self._iodevice = self._audio_input.start()
self._buffer = bytearray()
@ -691,32 +664,18 @@ class QtAudioInputRecorder(Recorder):
return
def write_file() -> None:
from PyQt6.QtMultimedia import QAudioFormat
# swallow the first 300ms to allow audio device to quiesce
bytes_per_frame = self._format.bytesPerFrame()
frames_to_skip = int(self._format.sampleRate() * self.STARTUP_DELAY)
bytes_to_skip = frames_to_skip * bytes_per_frame
if len(self._buffer) <= bytes_to_skip:
wait = int(44100 * self.STARTUP_DELAY)
if len(self._buffer) <= wait:
return
self._buffer = self._buffer[bytes_to_skip:]
self._buffer = self._buffer[wait:]
# Check if we need to convert float samples to int16
if self._format.sampleFormat() == QAudioFormat.SampleFormat.Float:
audio_data = self._convert_float_to_int16(self._buffer)
sample_width = 2 # int16 is 2 bytes
else:
# For integer formats, use the data as-is
audio_data = bytes(self._buffer)
sample_width = self._format.bytesPerSample()
# write out the wave file with the correct format parameters
# write out the wave file
wf = wave.open(self.output_path, "wb")
wf.setnchannels(self._format.channelCount())
wf.setsampwidth(sample_width)
wf.setsampwidth(2)
wf.setframerate(self._format.sampleRate())
wf.writeframes(audio_data)
wf.writeframes(self._buffer)
wf.close()
def and_then(fut: Future) -> None:
@ -784,8 +743,7 @@ class RecordDialog(QDialog):
def _setup_dialog(self) -> None:
self.setWindowTitle("Anki")
icon = QLabel()
qicon = theme_manager.icon_from_resources("icons:media-record.svg")
icon.setPixmap(qicon.pixmap(60, 60))
icon.setPixmap(QPixmap("icons:media-record.png"))
self.label = QLabel("...")
hbox = QHBoxLayout()
hbox.addWidget(icon)

View file

@ -177,13 +177,9 @@ class CustomStyles:
QPushButton:default {{
border: 1px solid {tm.var(colors.BORDER_FOCUS)};
}}
QPushButton {{
margin: 1px;
}}
QPushButton:focus, QPushButton:default:hover {{
QPushButton:focus {{
border: 2px solid {tm.var(colors.BORDER_FOCUS)};
outline: none;
margin: 0px;
}}
QPushButton:hover,
QTabBar::tab:hover,
@ -199,6 +195,9 @@ class CustomStyles:
)
};
}}
QPushButton:default:hover {{
border-width: 2px;
}}
QPushButton:pressed,
QPushButton:checked,
QSpinBox::up-button:pressed,

View file

@ -73,7 +73,7 @@ def handle_sync_error(mw: aqt.main.AnkiQt, err: Exception) -> None:
elif isinstance(err, Interrupted):
# no message to show
return
show_warning(str(err), parent=mw)
show_warning(str(err))
def on_normal_sync_timer(mw: aqt.main.AnkiQt) -> None:
@ -118,7 +118,7 @@ def sync_collection(mw: aqt.main.AnkiQt, on_done: Callable[[], None]) -> None:
if out.new_endpoint:
mw.pm.set_current_sync_url(out.new_endpoint)
if out.server_message:
showText(out.server_message, parent=mw)
showText(out.server_message)
if out.required == out.NO_CHANGES:
tooltip(parent=mw, msg=tr.sync_collection_complete())
# all done; track media progress

View file

@ -115,7 +115,7 @@ class ThemeManager:
# Workaround for Qt bug. First attempt was percent-escaping the chars,
# but Qt can't handle that.
# https://forum.qt.io/topic/55274/solved-qss-with-special-characters/11
path = re.sub(r"(['\u00A1-\u00FF])", r"\\\1", path)
path = re.sub(r"([\u00A1-\u00FF])", r"\\\1", path)
return path
def icon_from_resources(self, path: str | ColoredIcon) -> QIcon:

View file

@ -226,45 +226,29 @@ def ask_user_dialog(
)
def show_info(
text: str,
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
) -> MessageBox:
def show_info(text: str, callback: Callable | None = None, **kwargs: Any) -> MessageBox:
"Show a small info window with an OK button."
if "icon" not in kwargs:
kwargs["icon"] = QMessageBox.Icon.Information
return MessageBox(
text,
callback=(lambda _: callback()) if callback is not None else None,
parent=parent,
**kwargs,
)
def show_warning(
text: str,
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
text: str, callback: Callable | None = None, **kwargs: Any
) -> MessageBox:
"Show a small warning window with an OK button."
return show_info(
text, icon=QMessageBox.Icon.Warning, callback=callback, parent=parent, **kwargs
)
return show_info(text, icon=QMessageBox.Icon.Warning, callback=callback, **kwargs)
def show_critical(
text: str,
callback: Callable | None = None,
parent: QWidget | None = None,
**kwargs: Any,
text: str, callback: Callable | None = None, **kwargs: Any
) -> MessageBox:
"Show a small critical error window with an OK button."
return show_info(
text, icon=QMessageBox.Icon.Critical, callback=callback, parent=parent, **kwargs
)
return show_info(text, icon=QMessageBox.Icon.Critical, callback=callback, **kwargs)
def showWarning(

View file

@ -69,14 +69,17 @@ def add_python_requirements(reqs: list[str]) -> tuple[bool, str]:
def trigger_launcher_run() -> None:
"""Create a trigger file to request launcher UI on next run."""
"""Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run."""
try:
root = launcher_root()
if not root:
return
trigger_path = Path(root) / ".want-launcher"
trigger_path.touch()
pyproject_path = Path(root) / "pyproject.toml"
if pyproject_path.exists():
# Touch the file to update its mtime
pyproject_path.touch()
except Exception as e:
print(e)
@ -90,21 +93,17 @@ def update_and_restart() -> None:
with contextlib.suppress(ResourceWarning):
env = os.environ.copy()
env["ANKI_LAUNCHER_WANT_TERMINAL"] = "1"
creationflags = 0
if sys.platform == "win32":
creationflags = (
subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS
)
# On Windows, changing the handles breaks ANSI display
io = None if sys.platform == "win32" else subprocess.DEVNULL
subprocess.Popen(
[launcher],
start_new_session=True,
stdin=io,
stdout=io,
stderr=io,
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
env=env,
creationflags=creationflags,
)

View file

@ -13,8 +13,7 @@ HOST_ARCH=$(uname -m)
# Define output paths
OUTPUT_DIR="../../../out/launcher"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux"
LAUNCHER_DIR="$OUTPUT_DIR/anki-linux"
# Clean existing output directory
rm -rf "$LAUNCHER_DIR"
@ -78,8 +77,8 @@ chmod +x \
chmod -R a+r "$LAUNCHER_DIR"
ZSTD="zstd -c --long -T0 -18"
TRANSFORM="s%^.%anki-launcher-$ANKI_VERSION-linux%S"
TARBALL="$OUTPUT_DIR/anki-launcher-$ANKI_VERSION-linux.tar.zst"
TRANSFORM="s%^.%anki-linux%S"
TARBALL="$OUTPUT_DIR/anki-linux.tar.zst"
tar -I "$ZSTD" --transform "$TRANSFORM" -cf "$TARBALL" -C "$LAUNCHER_DIR" .

View file

@ -5,7 +5,7 @@
<key>CFBundleDisplayName</key>
<string>Anki</string>
<key>CFBundleShortVersionString</key>
<string>ANKI_VERSION</string>
<string>1.0</string>
<key>LSMinimumSystemVersion</key>
<string>12</string>
<key>LSApplicationCategoryType</key>

View file

@ -30,33 +30,26 @@ lipo -create \
-output "$APP_LAUNCHER/Contents/MacOS/launcher"
cp "$OUTPUT_DIR/uv" "$APP_LAUNCHER/Contents/MacOS/"
# Build install_name_tool stub
clang -arch arm64 -o "$OUTPUT_DIR/stub_arm64" stub.c
clang -arch x86_64 -o "$OUTPUT_DIR/stub_x86_64" stub.c
lipo -create "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64" -output "$APP_LAUNCHER/Contents/MacOS/install_name_tool"
rm "$OUTPUT_DIR/stub_arm64" "$OUTPUT_DIR/stub_x86_64"
# Copy support files
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
sed "s/ANKI_VERSION/$ANKI_VERSION/g" Info.plist > "$APP_LAUNCHER/Contents/Info.plist"
cp Info.plist "$APP_LAUNCHER/Contents/"
cp icon/Assets.car "$APP_LAUNCHER/Contents/Resources/"
cp ../pyproject.toml "$APP_LAUNCHER/Contents/Resources/"
cp ../../../.python-version "$APP_LAUNCHER/Contents/Resources/"
cp ../versions.py "$APP_LAUNCHER/Contents/Resources/"
# Codesign/bundle
if [ -z "$NODMG" ]; then
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/install_name_tool" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
# Codesign
for i in "$APP_LAUNCHER/Contents/MacOS/uv" "$APP_LAUNCHER/Contents/MacOS/launcher" "$APP_LAUNCHER"; do
codesign --force -vvvv -o runtime -s "Developer ID Application:" \
--entitlements entitlements.python.xml \
"$i"
done
done
# Check
codesign -vvv "$APP_LAUNCHER"
spctl -a "$APP_LAUNCHER"
# Check
codesign -vvv "$APP_LAUNCHER"
spctl -a "$APP_LAUNCHER"
# Notarize and build dmg
# Notarize and bundle (skip if NODMG is set)
if [ -z "$NODMG" ]; then
./notarize.sh "$OUTPUT_DIR"
./dmg/build.sh "$OUTPUT_DIR"
fi

View file

@ -6,8 +6,7 @@ set -e
# base folder with Anki.app in it
output="$1"
dist="$1/tmp"
ANKI_VERSION=$(cat ../../../.version | tr -d '\n')
dmg_path="$output/anki-launcher-$ANKI_VERSION-mac.dmg"
dmg_path="$output/Anki.dmg"
if [ -d "/Volumes/Anki" ]
then

View file

@ -1,6 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
int main(void) {
return 0;
}

View file

@ -22,11 +22,6 @@ const NSIS_PATH: &str = "C:\\Program Files (x86)\\NSIS\\makensis.exe";
fn main() -> Result<()> {
println!("Building Windows launcher...");
// Read version early so it can be used throughout the build process
let version = std::fs::read_to_string("../../../.version")?
.trim()
.to_string();
let output_dir = PathBuf::from(OUTPUT_DIR);
let launcher_exe_dir = PathBuf::from(LAUNCHER_EXE_DIR);
let nsis_dir = PathBuf::from(NSIS_DIR);
@ -36,20 +31,16 @@ fn main() -> Result<()> {
extract_nsis_plugins()?;
copy_files(&output_dir)?;
sign_binaries(&output_dir)?;
copy_nsis_files(&nsis_dir, &version)?;
copy_nsis_files(&nsis_dir)?;
build_uninstaller(&output_dir, &nsis_dir)?;
sign_file(&output_dir.join("uninstall.exe"))?;
generate_install_manifest(&output_dir)?;
build_installer(&output_dir, &nsis_dir)?;
let installer_filename = format!("anki-launcher-{version}-windows.exe");
let installer_path = PathBuf::from("../../../out/launcher_exe").join(&installer_filename);
sign_file(&installer_path)?;
sign_file(&PathBuf::from("../../../out/launcher_exe/anki-install.exe"))?;
println!("Build completed successfully!");
println!("Output directory: {}", output_dir.display());
println!("Installer: ../../../out/launcher_exe/{installer_filename}");
println!("Installer: ../../../out/launcher_exe/anki-install.exe");
Ok(())
}
@ -244,13 +235,11 @@ fn generate_install_manifest(output_dir: &Path) -> Result<()> {
Ok(())
}
fn copy_nsis_files(nsis_dir: &Path, version: &str) -> Result<()> {
fn copy_nsis_files(nsis_dir: &Path) -> Result<()> {
println!("Copying NSIS support files...");
// Copy anki.template.nsi as anki.nsi and substitute version placeholders
let template_content = std::fs::read_to_string("anki.template.nsi")?;
let substituted_content = template_content.replace("ANKI_VERSION", version);
write_file(nsis_dir.join("anki.nsi"), substituted_content)?;
// Copy anki.template.nsi as anki.nsi
copy_file("anki.template.nsi", nsis_dir.join("anki.nsi"))?;
// Copy fileassoc.nsh
copy_file("fileassoc.nsh", nsis_dir.join("fileassoc.nsh"))?;

View file

@ -11,6 +11,7 @@ use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use anki_io::copy_file;
use anki_io::copy_if_newer;
use anki_io::create_dir_all;
use anki_io::modified_time;
use anki_io::read_file;
@ -45,14 +46,8 @@ struct State {
dist_python_version_path: std::path::PathBuf,
uv_lock_path: std::path::PathBuf,
sync_complete_marker: std::path::PathBuf,
launcher_trigger_file: std::path::PathBuf,
mirror_path: std::path::PathBuf,
pyproject_modified_by_user: bool,
previous_version: Option<String>,
resources_dir: std::path::PathBuf,
venv_folder: std::path::PathBuf,
/// system Python + PyQt6 library mode
system_qt: bool,
}
#[derive(Debug, Clone)]
@ -61,12 +56,6 @@ pub enum VersionKind {
Uv(String),
}
#[derive(Debug)]
pub struct Releases {
pub latest: Vec<String>,
pub all: Vec<String>,
}
#[derive(Debug, Clone)]
pub enum MainMenuChoice {
Latest,
@ -74,8 +63,8 @@ pub enum MainMenuChoice {
Version(VersionKind),
ToggleBetas,
ToggleCache,
DownloadMirror,
Uninstall,
Quit,
}
fn main() {
@ -90,13 +79,9 @@ fn main() {
}
fn run() -> Result<()> {
let uv_install_root = if let Ok(custom_root) = std::env::var("ANKI_LAUNCHER_VENV_ROOT") {
std::path::PathBuf::from(custom_root)
} else {
dirs::data_local_dir()
let uv_install_root = dirs::data_local_dir()
.context("Unable to determine data_dir")?
.join("AnkiProgramFiles")
};
.join("AnkiProgramFiles");
let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?;
@ -115,14 +100,8 @@ fn run() -> Result<()> {
dist_python_version_path: resources_dir.join(".python-version"),
uv_lock_path: uv_install_root.join("uv.lock"),
sync_complete_marker: uv_install_root.join(".sync_complete"),
launcher_trigger_file: uv_install_root.join(".want-launcher"),
mirror_path: uv_install_root.join("mirror"),
pyproject_modified_by_user: false, // calculated later
previous_version: None,
system_qt: (cfg!(unix) && !cfg!(target_os = "macos"))
&& resources_dir.join("system_qt").exists(),
resources_dir,
venv_folder: uv_install_root.join(".venv"),
};
// Check for uninstall request from Windows uninstaller
@ -132,19 +111,23 @@ fn run() -> Result<()> {
return Ok(());
}
// Create install directory
// Create install directory and copy project files in
create_dir_all(&state.uv_install_root)?;
copy_if_newer(&state.dist_pyproject_path, &state.user_pyproject_path)?;
copy_if_newer(
&state.dist_python_version_path,
&state.user_python_version_path,
)?;
let launcher_requested =
state.launcher_trigger_file.exists() || !state.user_pyproject_path.exists();
let pyproject_has_changed = !state.sync_complete_marker.exists() || {
let pyproject_toml_time = modified_time(&state.user_pyproject_path)?;
let sync_complete_time = modified_time(&state.sync_complete_marker)?;
Ok::<bool, anyhow::Error>(pyproject_toml_time > sync_complete_time)
}
.unwrap_or(true);
// Calculate whether user has custom edits that need syncing
let pyproject_time = file_timestamp_secs(&state.user_pyproject_path);
let sync_time = file_timestamp_secs(&state.sync_complete_marker);
state.pyproject_modified_by_user = pyproject_time > sync_time;
let pyproject_has_changed = state.pyproject_modified_by_user;
if !launcher_requested && !pyproject_has_changed {
// If no launcher request and venv is already up to date, launch Anki normally
if !pyproject_has_changed {
// If venv is already up to date, launch Anki normally
let args: Vec<String> = std::env::args().skip(1).collect();
let cmd = build_python_command(&state, &args)?;
launch_anki_normally(cmd)?;
@ -154,11 +137,6 @@ fn run() -> Result<()> {
// If we weren't in a terminal, respawn ourselves in one
ensure_terminal_shown()?;
if launcher_requested {
// Remove the trigger file to make request ephemeral
let _ = remove_file(&state.launcher_trigger_file);
}
print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
println!("\x1B[1mAnki Launcher\x1B[0m\n");
@ -166,10 +144,15 @@ fn run() -> Result<()> {
check_versions(&mut state);
let first_run = !state.uv_install_root.join(".venv").exists();
if first_run {
handle_version_install_or_update(&state, MainMenuChoice::Latest)?;
} else {
main_menu_loop(&state)?;
}
// Write marker file to indicate we've completed the sync process
write_sync_marker(&state)?;
write_sync_marker(&state.sync_complete_marker)?;
#[cfg(target_os = "macos")]
{
@ -195,15 +178,12 @@ fn run() -> Result<()> {
Ok(())
}
fn extract_aqt_version(state: &State) -> Option<String> {
// Check if .venv exists first
if !state.venv_folder.exists() {
return None;
}
let output = uv_command(state)
.ok()?
.env("VIRTUAL_ENV", &state.venv_folder)
fn extract_aqt_version(
uv_path: &std::path::Path,
uv_install_root: &std::path::Path,
) -> Option<String> {
let output = Command::new(uv_path)
.current_dir(uv_install_root)
.args(["pip", "show", "aqt"])
.output()
.ok()?;
@ -228,7 +208,7 @@ fn check_versions(state: &mut State) {
}
// Determine current version by invoking uv pip show aqt
match extract_aqt_version(state) {
match extract_aqt_version(&state.uv_path, &state.uv_install_root) {
Some(version) => {
state.current_version = Some(version);
}
@ -250,15 +230,21 @@ fn check_versions(state: &mut State) {
}
fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Result<()> {
update_pyproject_for_version(choice.clone(), state)?;
update_pyproject_for_version(
choice.clone(),
state.dist_pyproject_path.clone(),
state.user_pyproject_path.clone(),
state.dist_python_version_path.clone(),
state.user_python_version_path.clone(),
)?;
// Extract current version before syncing (but don't write to file yet)
let previous_version_to_save = extract_aqt_version(state);
let previous_version_to_save = extract_aqt_version(&state.uv_path, &state.uv_install_root);
// Remove sync marker before attempting sync
let _ = remove_file(&state.sync_complete_marker);
println!("Updating Anki...\n");
println!("\x1B[1mUpdating Anki...\x1B[0m\n");
let python_version_trimmed = if state.user_python_version_path.exists() {
let python_version = read_file(&state.user_python_version_path)?;
@ -269,65 +255,38 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
None
};
// Prepare to sync the venv
let mut command = uv_command(state)?;
if cfg!(target_os = "macos") {
// remove CONDA_PREFIX/bin from PATH to avoid conda interference
if let Ok(conda_prefix) = std::env::var("CONDA_PREFIX") {
if let Ok(current_path) = std::env::var("PATH") {
let conda_bin = format!("{conda_prefix}/bin");
let filtered_paths: Vec<&str> = current_path
.split(':')
.filter(|&path| path != conda_bin)
.collect();
let new_path = filtered_paths.join(":");
command.env("PATH", new_path);
}
}
// put our fake install_name_tool at the top of the path to override
// potential conflicts
if let Ok(current_path) = std::env::var("PATH") {
let exe_dir = std::env::current_exe()
.ok()
.and_then(|exe| exe.parent().map(|p| p.to_path_buf()));
if let Some(exe_dir) = exe_dir {
let new_path = format!("{}:{}", exe_dir.display(), current_path);
command.env("PATH", new_path);
}
}
}
// Create venv with system site packages if system Qt is enabled
if state.system_qt {
let mut venv_command = uv_command(state)?;
venv_command.args([
"venv",
"--no-managed-python",
"--system-site-packages",
"--no-config",
]);
venv_command.ensure_success()?;
}
// `uv sync` sometimes does not pull in Python automatically
// This might be system/platform specific and/or a uv bug.
let mut command = Command::new(&state.uv_path);
command
.current_dir(&state.uv_install_root)
.env("UV_CACHE_DIR", &state.uv_cache_dir)
.env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
.env(
"UV_HTTP_TIMEOUT",
std::env::var("UV_HTTP_TIMEOUT").unwrap_or_else(|_| "180".to_string()),
);
.args(["python", "install", "--managed-python"]);
command.args(["sync", "--upgrade", "--no-config"]);
if !state.system_qt {
command.arg("--managed-python");
// Add python version if .python-version file exists
if let Some(version) = &python_version_trimmed {
command.args([version]);
}
// Add python version if .python-version file exists (but not for system Qt)
command.ensure_success().context("Python install failed")?;
// Sync the venv
let mut command = Command::new(&state.uv_path);
command
.current_dir(&state.uv_install_root)
.env("UV_CACHE_DIR", &state.uv_cache_dir)
.env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
.args(["sync", "--upgrade", "--managed-python"]);
// Add python version if .python-version file exists
if let Some(version) = &python_version_trimmed {
if !state.system_qt {
command.args(["--python", version]);
}
// Set UV_PRERELEASE=allow if beta mode is enabled
if state.prerelease_marker.exists() {
command.env("UV_PRERELEASE", "allow");
}
if state.no_cache_marker.exists() {
@ -338,7 +297,7 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
Ok(_) => {
// Sync succeeded
if matches!(&choice, MainMenuChoice::Version(VersionKind::PyOxidizer(_))) {
inject_helper_addon()?;
inject_helper_addon(&state.uv_install_root)?;
}
// Now that sync succeeded, save the previous version
@ -367,11 +326,9 @@ fn main_menu_loop(state: &State) -> Result<()> {
let menu_choice = get_main_menu_choice(state)?;
match menu_choice {
MainMenuChoice::Quit => std::process::exit(0),
MainMenuChoice::KeepExisting => {
if state.pyproject_modified_by_user {
// User has custom edits, sync them
handle_version_install_or_update(state, MainMenuChoice::KeepExisting)?;
}
// Skip sync, just launch existing installation
break;
}
MainMenuChoice::ToggleBetas => {
@ -402,11 +359,6 @@ fn main_menu_loop(state: &State) -> Result<()> {
println!();
continue;
}
MainMenuChoice::DownloadMirror => {
show_mirror_submenu(state)?;
println!();
continue;
}
MainMenuChoice::Uninstall => {
if handle_uninstall(state)? {
std::process::exit(0);
@ -414,7 +366,9 @@ fn main_menu_loop(state: &State) -> Result<()> {
continue;
}
choice @ (MainMenuChoice::Latest | MainMenuChoice::Version(_)) => {
handle_version_install_or_update(state, choice.clone())?;
if handle_version_install_or_update(state, choice.clone()).is_err() {
continue;
}
break;
}
}
@ -422,37 +376,23 @@ fn main_menu_loop(state: &State) -> Result<()> {
Ok(())
}
fn write_sync_marker(state: &State) -> Result<()> {
fn write_sync_marker(sync_complete_marker: &std::path::Path) -> Result<()> {
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.context("Failed to get system time")?
.as_secs();
write_file(&state.sync_complete_marker, timestamp.to_string())?;
write_file(sync_complete_marker, timestamp.to_string())?;
Ok(())
}
/// Get mtime of provided file, or 0 if unavailable
fn file_timestamp_secs(path: &std::path::Path) -> i64 {
modified_time(path)
.map(|t| t.duration_since(UNIX_EPOCH).unwrap_or_default().as_secs() as i64)
.unwrap_or_default()
}
fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
loop {
println!("1) Latest Anki (press Enter)");
println!("1) Latest Anki (just press enter)");
println!("2) Choose a version");
if let Some(current_version) = &state.current_version {
let normalized_current = normalize_version(current_version);
if state.pyproject_modified_by_user {
println!("3) Sync project changes");
} else {
println!("3) Keep existing version ({normalized_current})");
}
}
if let Some(prev_version) = &state.previous_version {
if state.current_version.as_ref() != Some(prev_version) {
let normalized_prev = normalize_version(prev_version);
@ -471,13 +411,9 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
"6) Cache downloads: {}",
if cache_enabled { "on" } else { "off" }
);
let mirror_enabled = is_mirror_enabled(state);
println!(
"7) Download mirror: {}",
if mirror_enabled { "on" } else { "off" }
);
println!();
println!("8) Uninstall");
println!("7) Uninstall");
println!("8) Quit");
print!("> ");
let _ = stdout().flush();
@ -516,8 +452,8 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
}
"5" => MainMenuChoice::ToggleBetas,
"6" => MainMenuChoice::ToggleCache,
"7" => MainMenuChoice::DownloadMirror,
"8" => MainMenuChoice::Uninstall,
"7" => MainMenuChoice::Uninstall,
"8" => MainMenuChoice::Quit,
_ => {
println!("Invalid input. Please try again.");
continue;
@ -527,9 +463,15 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
}
fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
let releases = get_releases(state)?;
let releases_str = releases
.latest
println!("Please wait...");
let include_prereleases = state.prerelease_marker.exists();
let all_versions = fetch_versions(state)?;
let all_versions = filter_and_normalize_versions(all_versions, include_prereleases);
let latest_patches = with_only_latest_patch(&all_versions);
let latest_releases: Vec<&String> = latest_patches.iter().take(5).collect();
let releases_str = latest_releases
.iter()
.map(|v| v.as_str())
.collect::<Vec<_>>()
@ -552,7 +494,7 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
let normalized_input = normalize_version(input);
// Check if the version exists in the available versions
let version_exists = releases.all.iter().any(|v| v == &normalized_input);
let version_exists = all_versions.iter().any(|v| v == &normalized_input);
match (parse_version_kind(input), version_exists) {
(Some(version_kind), true) => {
@ -560,7 +502,7 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
Ok(Some(version_kind))
}
(None, true) => {
println!("Versions before 2.1.50 can't be installed.");
println!("Versions before 2.1.50 can't be installedn");
Ok(None)
}
_ => {
@ -683,49 +625,47 @@ fn filter_and_normalize_versions(
fn fetch_versions(state: &State) -> Result<Vec<String>> {
let versions_script = state.resources_dir.join("versions.py");
let mut cmd = uv_command(state)?;
cmd.args(["run", "--no-project", "--no-config", "--managed-python"])
.args(["--with", "pip-system-certs,requests[socks]"]);
let mut cmd = Command::new(&state.uv_path);
cmd.current_dir(&state.uv_install_root)
.args(["run", "--no-project"])
.arg(&versions_script);
let python_version = read_file(&state.dist_python_version_path)?;
let python_version_str =
String::from_utf8(python_version).context("Invalid UTF-8 in .python-version")?;
let version_trimmed = python_version_str.trim();
if !version_trimmed.is_empty() {
cmd.args(["--python", version_trimmed]);
}
cmd.arg(&versions_script);
let output = match cmd.utf8_output() {
Ok(output) => output,
Err(e) => {
print!("Unable to check for Anki versions. Please check your internet connection.\n\n");
return Err(e.into());
}
};
let output = cmd.utf8_output()?;
let versions = serde_json::from_str(&output.stdout).context("Failed to parse versions JSON")?;
Ok(versions)
}
fn get_releases(state: &State) -> Result<Releases> {
println!("Checking for updates...");
let include_prereleases = state.prerelease_marker.exists();
let all_versions = fetch_versions(state)?;
let all_versions = filter_and_normalize_versions(all_versions, include_prereleases);
let latest_patches = with_only_latest_patch(&all_versions);
let latest_releases: Vec<String> = latest_patches.into_iter().take(5).collect();
Ok(Releases {
latest: latest_releases,
all: all_versions,
})
}
fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
let content = read_file(&state.dist_pyproject_path)?;
let content_str = String::from_utf8(content).context("Invalid UTF-8 in pyproject.toml")?;
let updated_content = match version_kind {
fn update_pyproject_for_version(
menu_choice: MainMenuChoice,
dist_pyproject_path: std::path::PathBuf,
user_pyproject_path: std::path::PathBuf,
dist_python_version_path: std::path::PathBuf,
user_python_version_path: std::path::PathBuf,
) -> Result<()> {
match menu_choice {
MainMenuChoice::Latest => {
let content = read_file(&dist_pyproject_path)?;
write_file(&user_pyproject_path, &content)?;
let python_version_content = read_file(&dist_python_version_path)?;
write_file(&user_python_version_path, &python_version_content)?;
}
MainMenuChoice::KeepExisting => {
// Do nothing - keep existing pyproject.toml and .python-version
}
MainMenuChoice::ToggleBetas => {
unreachable!();
}
MainMenuChoice::ToggleCache => {
unreachable!();
}
MainMenuChoice::Uninstall => {
unreachable!();
}
MainMenuChoice::Version(version_kind) => {
let content = read_file(&dist_pyproject_path)?;
let content_str =
String::from_utf8(content).context("Invalid UTF-8 in pyproject.toml")?;
let updated_content = match &version_kind {
VersionKind::PyOxidizer(version) => {
// Replace package name and add PyQt6 dependencies
content_str.replace(
@ -744,72 +684,24 @@ fn apply_version_kind(version_kind: &VersionKind, state: &State) -> Result<()> {
),
)
}
VersionKind::Uv(version) => content_str.replace(
"anki-release",
&format!("anki-release=={version}\",\n \"anki=={version}\",\n \"aqt=={version}"),
),
VersionKind::Uv(version) => {
content_str.replace("anki-release", &format!("anki-release=={version}"))
}
};
let final_content = if state.system_qt {
format!(
concat!(
"{}\n\n[tool.uv]\n",
"override-dependencies = [\n",
" \"pyqt6; sys_platform=='never'\",\n",
" \"pyqt6-qt6; sys_platform=='never'\",\n",
" \"pyqt6-webengine; sys_platform=='never'\",\n",
" \"pyqt6-webengine-qt6; sys_platform=='never'\",\n",
" \"pyqt6_sip; sys_platform=='never'\"\n",
"]\n"
),
updated_content
)
} else {
updated_content
};
write_file(&state.user_pyproject_path, &final_content)?;
write_file(&user_pyproject_path, &updated_content)?;
// Update .python-version based on version kind
match version_kind {
match &version_kind {
VersionKind::PyOxidizer(_) => {
write_file(&state.user_python_version_path, "3.9")?;
write_file(&user_python_version_path, "3.9")?;
}
VersionKind::Uv(_) => {
copy_file(
&state.dist_python_version_path,
&state.user_python_version_path,
)?;
copy_file(&dist_python_version_path, &user_python_version_path)?;
}
}
Ok(())
}
fn update_pyproject_for_version(menu_choice: MainMenuChoice, state: &State) -> Result<()> {
match menu_choice {
MainMenuChoice::Latest => {
// Get the latest release version and create a VersionKind for it
let releases = get_releases(state)?;
let latest_version = releases.latest.first().context("No latest version found")?;
apply_version_kind(&VersionKind::Uv(latest_version.clone()), state)?;
}
MainMenuChoice::KeepExisting => {
// Do nothing - keep existing pyproject.toml and .python-version
}
MainMenuChoice::ToggleBetas => {
unreachable!();
}
MainMenuChoice::ToggleCache => {
unreachable!();
}
MainMenuChoice::DownloadMirror => {
unreachable!();
}
MainMenuChoice::Uninstall => {
unreachable!();
}
MainMenuChoice::Version(version_kind) => {
apply_version_kind(&version_kind, state)?;
MainMenuChoice::Quit => {
std::process::exit(0);
}
}
Ok(())
@ -858,7 +750,7 @@ fn parse_version_kind(version: &str) -> Option<VersionKind> {
}
}
fn inject_helper_addon() -> Result<()> {
fn inject_helper_addon(_uv_install_root: &std::path::Path) -> Result<()> {
let addons21_path = get_anki_addons21_path()?;
if !addons21_path.exists() {
@ -960,40 +852,16 @@ fn handle_uninstall(state: &State) -> Result<bool> {
Ok(true)
}
fn uv_command(state: &State) -> Result<Command> {
let mut command = Command::new(&state.uv_path);
command.current_dir(&state.uv_install_root);
// remove UV_* environment variables to avoid interference
for (key, _) in std::env::vars() {
if key.starts_with("UV_") {
command.env_remove(key);
}
}
command
.env_remove("VIRTUAL_ENV")
.env_remove("SSLKEYLOGFILE");
// Add mirror environment variable if enabled
if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
command
.env("UV_PYTHON_INSTALL_MIRROR", &python_mirror)
.env("UV_DEFAULT_INDEX", &pypi_mirror);
}
Ok(command)
}
fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
let python_exe = if cfg!(target_os = "windows") {
let show_console = std::env::var("ANKI_CONSOLE").is_ok();
if show_console {
state.venv_folder.join("Scripts/python.exe")
state.uv_install_root.join(".venv/Scripts/python.exe")
} else {
state.venv_folder.join("Scripts/pythonw.exe")
state.uv_install_root.join(".venv/Scripts/pythonw.exe")
}
} else {
state.venv_folder.join("bin/python")
state.uv_install_root.join(".venv/bin/python")
};
let mut cmd = Command::new(&python_exe);
@ -1006,75 +874,15 @@ fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
// Set UV and Python paths for the Python code
cmd.env("ANKI_LAUNCHER_UV", state.uv_path.utf8()?.as_str());
cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str());
cmd.env_remove("SSLKEYLOGFILE");
// Set UV_PRERELEASE=allow if beta mode is enabled
if state.prerelease_marker.exists() {
cmd.env("UV_PRERELEASE", "allow");
}
Ok(cmd)
}
fn is_mirror_enabled(state: &State) -> bool {
state.mirror_path.exists()
}
fn get_mirror_urls(state: &State) -> Result<Option<(String, String)>> {
if !state.mirror_path.exists() {
return Ok(None);
}
let content = read_file(&state.mirror_path)?;
let content_str = String::from_utf8(content).context("Invalid UTF-8 in mirror file")?;
let lines: Vec<&str> = content_str.lines().collect();
if lines.len() >= 2 {
Ok(Some((
lines[0].trim().to_string(),
lines[1].trim().to_string(),
)))
} else {
Ok(None)
}
}
fn show_mirror_submenu(state: &State) -> Result<()> {
loop {
println!("Download mirror options:");
println!("1) No mirror");
println!("2) China");
print!("> ");
let _ = stdout().flush();
let mut input = String::new();
let _ = stdin().read_line(&mut input);
let input = input.trim();
match input {
"1" => {
// Remove mirror file
if state.mirror_path.exists() {
let _ = remove_file(&state.mirror_path);
}
println!("Mirror disabled.");
break;
}
"2" => {
// Write China mirror URLs
let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/";
write_file(&state.mirror_path, china_mirrors)?;
println!("China mirror enabled.");
break;
}
"" => {
// Empty input - return to main menu
break;
}
_ => {
println!("Invalid input. Please try again.");
continue;
}
}
}
Ok(())
}
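For context on the mirror handling above: the mirror file holds two lines, a python-build-standalone mirror on the first and a PyPI index URL on the second, and uv_command() exports them as UV_PYTHON_INSTALL_MIRROR and UV_DEFAULT_INDEX. A minimal sketch of that round trip, with a hypothetical relative path standing in for the launcher's real state directory:

use std::fs;
use std::path::Path;
use std::process::Command;

// Sketch only: parse the two-line mirror file (python install mirror first,
// PyPI index second), mirroring get_mirror_urls() above.
fn read_mirror_file(path: &Path) -> Option<(String, String)> {
    let content = fs::read_to_string(path).ok()?;
    let mut lines = content.lines();
    let python_mirror = lines.next()?.trim().to_string();
    let pypi_mirror = lines.next()?.trim().to_string();
    Some((python_mirror, pypi_mirror))
}

fn main() {
    // Hypothetical location; the real launcher keeps this alongside its other marker files.
    let mirror_path = Path::new("mirror");
    let mut cmd = Command::new("uv");
    if let Some((python_mirror, pypi_mirror)) = read_mirror_file(mirror_path) {
        cmd.env("UV_PYTHON_INSTALL_MIRROR", python_mirror)
            .env("UV_DEFAULT_INDEX", pypi_mirror);
    }
    cmd.args(["sync", "--no-config"]);
    // cmd.status() would then run the sync against the configured mirrors.
}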
#[cfg(test)]
mod tests {
use super::*;

View file

@ -62,9 +62,8 @@ pub fn prepare_for_launch_after_update(mut cmd: Command, root: &Path) -> Result<
pub fn relaunch_in_terminal() -> Result<()> {
let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
Command::new("open")
.args(["-na", "Terminal"])
.args(["-a", "Terminal"])
.arg(current_exe)
.env_remove("ANKI_LAUNCHER_WANT_TERMINAL")
.ensure_spawn()?;
std::process::exit(0);
}

View file

@ -116,9 +116,8 @@ pub use windows::ensure_terminal_shown;
pub fn ensure_terminal_shown() -> Result<()> {
use std::io::IsTerminal;
let want_terminal = std::env::var("ANKI_LAUNCHER_WANT_TERMINAL").is_ok();
let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout());
if want_terminal || !stdout_is_terminal {
if !stdout_is_terminal {
#[cfg(target_os = "macos")]
mac::relaunch_in_terminal()?;
#[cfg(not(target_os = "macos"))]

View file

@ -11,22 +11,21 @@ pub fn relaunch_in_terminal() -> Result<()> {
// Try terminals in roughly most specific to least specific.
// First, try commonly used terminals for riced systems.
// Second, try common defaults.
// Finally, try x11 compatibility terminals.
// Second, try the minimalist/compatibility terminals.
// Finally, try terminals usually installed by default.
let terminals = [
// commonly used for riced systems
("alacritty", vec!["-e"]),
("kitty", vec![]),
// minimalistic terminals for constrained systems
("foot", vec![]),
// the user's default terminal in Debian/Ubuntu
("urxvt", vec!["-e"]),
("xterm", vec!["-e"]),
("x-terminal-emulator", vec!["-e"]),
// default installs for the most common distros
("xfce4-terminal", vec!["-e"]),
("gnome-terminal", vec!["-e"]),
("gnome-terminal", vec!["--"]),
("konsole", vec!["-e"]),
// x11-compatibility terminals
("urxvt", vec!["-e"]),
("xterm", vec!["-e"]),
];
for (terminal_cmd, args) in &terminals {
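As a rough idea of how a priority list like this is normally consumed — trying each candidate until one spawns — here is a hypothetical sketch (the shortened list and the early-return behaviour are assumptions, not the launcher's exact code):

use std::process::Command;

// Hypothetical sketch: walk the candidates in priority order and hand the
// current executable to the first terminal emulator that spawns.
fn relaunch_in_first_available_terminal(current_exe: &str) {
    let terminals: &[(&str, &[&str])] = &[
        ("alacritty", &["-e"]),
        ("kitty", &[]),
        ("xterm", &["-e"]),
    ];
    for (terminal_cmd, args) in terminals {
        if Command::new(terminal_cmd)
            .args(*args)
            .arg(current_exe)
            .spawn()
            .is_ok()
        {
            return;
        }
    }
    eprintln!("no terminal emulator found");
}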

View file

@ -8,7 +8,6 @@ use anyhow::Context;
use anyhow::Result;
use widestring::u16cstr;
use windows::core::PCWSTR;
use windows::Wdk::System::SystemServices::RtlGetVersion;
use windows::Win32::System::Console::AttachConsole;
use windows::Win32::System::Console::GetConsoleWindow;
use windows::Win32::System::Console::ATTACH_PARENT_PROCESS;
@ -19,25 +18,8 @@ use windows::Win32::System::Registry::HKEY;
use windows::Win32::System::Registry::HKEY_CURRENT_USER;
use windows::Win32::System::Registry::KEY_READ;
use windows::Win32::System::Registry::REG_SZ;
use windows::Win32::System::SystemInformation::OSVERSIONINFOW;
use windows::Win32::UI::Shell::SetCurrentProcessExplicitAppUserModelID;
/// Returns true if running on Windows 10 (not Windows 11)
fn is_windows_10() -> bool {
unsafe {
let mut info = OSVERSIONINFOW {
dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
..Default::default()
};
if RtlGetVersion(&mut info).is_ok() {
// Windows 10 has build numbers < 22000, Windows 11 >= 22000
info.dwBuildNumber < 22000 && info.dwMajorVersion == 10
} else {
false
}
}
}
pub fn ensure_terminal_shown() -> Result<()> {
unsafe {
if !GetConsoleWindow().is_invalid() {
@ -47,14 +29,6 @@ pub fn ensure_terminal_shown() -> Result<()> {
}
if std::env::var("ANKI_IMPLICIT_CONSOLE").is_ok() && attach_to_parent_console() {
// This black magic triggers Windows to switch to the new
// ANSI-supporting console host, which is usually only available
// when the app is built with the console subsystem.
// Only needed on Windows 10, not Windows 11.
if is_windows_10() {
let _ = Command::new("cmd").args(["/C", ""]).status();
}
// Successfully attached to parent console
reconnect_stdio_to_console();
return Ok(());

View file

@ -3,11 +3,7 @@
import json
import sys
import pip_system_certs.wrapt_requests
import requests
pip_system_certs.wrapt_requests.inject_truststore()
import urllib.request
def main():
@ -15,9 +11,8 @@ def main():
url = "https://pypi.org/pypi/aqt/json"
try:
response = requests.get(url, timeout=30)
response.raise_for_status()
data = response.json()
with urllib.request.urlopen(url, timeout=30) as response:
data = json.loads(response.read().decode("utf-8"))
releases = data.get("releases", {})
# Create list of (version, upload_time) tuples

View file

@ -24,7 +24,7 @@ Name "Anki"
Unicode true
; The file to write (relative to nsis directory)
OutFile "..\launcher_exe\anki-launcher-ANKI_VERSION-windows.exe"
OutFile "..\launcher_exe\anki-install.exe"
; Non elevated
RequestExecutionLevel user
@ -214,7 +214,7 @@ Section ""
; Write the uninstall keys for Windows
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayName" "Anki Launcher"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "ANKI_VERSION"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "DisplayVersion" "1.0.0"
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "UninstallString" '"$INSTDIR\uninstall.exe"'
WriteRegStr HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "QuietUninstallString" '"$INSTDIR\uninstall.exe" /S'
WriteRegDWORD HKCU "Software\Microsoft\Windows\CurrentVersion\Uninstall\Anki" "NoModify" 1

View file

@ -33,12 +33,6 @@ class _MacOSHelper:
"On completion, file should be saved if no error has arrived."
self._dll.end_wav_record()
def disable_appnap(self) -> None:
self._dll.disable_appnap()
def enable_appnap(self) -> None:
self._dll.enable_appnap()
# this must not be overwritten or deallocated
@CFUNCTYPE(None, c_char_p) # type: ignore

View file

@ -1,25 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import Foundation
private var currentActivity: NSObjectProtocol?
@_cdecl("disable_appnap")
public func disableAppNap() {
// No-op if already assigned
guard currentActivity == nil else { return }
currentActivity = ProcessInfo.processInfo.beginActivity(
options: .userInitiatedAllowingIdleSystemSleep,
reason: "AppNap is disabled"
)
}
@_cdecl("enable_appnap")
public func enableAppNap() {
guard let activity = currentActivity else { return }
ProcessInfo.processInfo.endActivity(activity)
currentActivity = nil
}

View file

@ -15,7 +15,6 @@ echo "Building macOS helper dylib..."
# Create the wheel using uv
echo "Creating wheel..."
cd "$SCRIPT_DIR"
rm -rf dist
"$PROJ_ROOT/out/extracted/uv/uv" build --wheel
echo "Build complete!"

View file

@ -1,6 +1,8 @@
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import os
import platform
import subprocess
import sys
from pathlib import Path

View file

@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "anki-mac-helper"
version = "0.1.1"
version = "0.1.0"
description = "Small support library for Anki on Macs"
requires-python = ">=3.9"
license = { text = "AGPL-3.0-or-later" }

View file

@ -1,14 +0,0 @@
#!/bin/bash
#
# Build and install into the launcher venv
set -e
./build.sh
if [[ "$OSTYPE" == "darwin"* ]]; then
export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv
else
export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv
fi
../../out/extracted/uv/uv pip install dist/*.whl

View file

@ -12,7 +12,7 @@ dependencies = [
"send2trash",
"waitress>=2.0.0",
"pywin32; sys.platform == 'win32'",
"anki-mac-helper>=0.1.1; sys.platform == 'darwin'",
"anki-mac-helper; sys.platform == 'darwin'",
"pip-system-certs!=5.1",
"pyqt6>=6.2",
"pyqt6-webengine>=6.2",
@ -40,8 +40,8 @@ qt67 = [
qt = [
"pyqt6==6.9.1",
"pyqt6-qt6==6.9.1",
"pyqt6-webengine==6.8.0",
"pyqt6-webengine-qt6==6.8.2",
"pyqt6-webengine==6.9.0",
"pyqt6-webengine-qt6==6.9.1",
"pyqt6_sip==13.10.2",
]
qt68 = [

View file

@ -81,7 +81,6 @@ pin-project.workspace = true
prost.workspace = true
pulldown-cmark.workspace = true
rand.workspace = true
rayon.workspace = true
regex.workspace = true
reqwest.workspace = true
rusqlite.workspace = true

View file

@ -22,7 +22,6 @@ inflections.workspace = true
anki_io.workspace = true
anyhow.workspace = true
itertools.workspace = true
regex.workspace = true
[dependencies]
fluent.workspace = true

View file

@ -4,5 +4,6 @@
// Include auto-generated content
#![allow(clippy::all)]
#![allow(text_direction_codepoint_in_literal)]
include!(concat!(env!("OUT_DIR"), "/strings.rs"));

View file

@ -195,30 +195,12 @@ pub(crate) const {lang_name}: phf::Map<&str, &str> = phf::phf_map! {{",
.unwrap();
for (module, contents) in modules {
let escaped_contents = escape_unicode_control_chars(contents);
writeln!(
buf,
r###" "{module}" => r##"{escaped_contents}"##,"###
)
.unwrap();
writeln!(buf, r###" "{module}" => r##"{contents}"##,"###).unwrap();
}
buf.push_str("};\n");
}
fn escape_unicode_control_chars(input: &str) -> String {
use regex::Regex;
static RE: std::sync::OnceLock<Regex> = std::sync::OnceLock::new();
let re = RE.get_or_init(|| Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap());
re.replace_all(input, |caps: &regex::Captures| {
let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
format!("\\u{{{:04x}}}", c as u32)
})
.into_owned()
}
fn lang_constant_name(lang: &str) -> String {
lang.to_ascii_uppercase().replace('-', "_")
}
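For reference, the helper on the main side of this hunk rewrites Unicode bidirectional-control codepoints (U+202A–U+202E and U+2066–U+2069) as visible \u{...} escapes before the translations are baked into the generated phf map, which is why the text_direction_codepoint_in_literal allow only appears on the 25.08b1 side of the lib.rs hunk further up. A small self-contained sketch of the same transformation (assumes only the regex crate, as in the diff):

use regex::Regex;

// Sketch of the escaping shown above: replace bidi-control characters with
// their literal \u{...} spelling so they stay visible in generated source.
fn escape_unicode_control_chars(input: &str) -> String {
    let re = Regex::new(r"[\u{202a}-\u{202e}\u{2066}-\u{2069}]").unwrap();
    re.replace_all(input, |caps: &regex::Captures| {
        let c = caps.get(0).unwrap().as_str().chars().next().unwrap();
        format!("\\u{{{:04x}}}", c as u32)
    })
    .into_owned()
}

fn main() {
    // U+202E (RIGHT-TO-LEFT OVERRIDE) becomes a plain-ASCII escape sequence.
    assert_eq!(
        escape_unicode_control_chars("abc\u{202e}def"),
        "abc\\u{202e}def"
    );
}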

View file

@ -42,14 +42,14 @@ enum CheckableUrl {
}
impl CheckableUrl {
fn url(&self) -> Cow<'_, str> {
fn url(&self) -> Cow<str> {
match *self {
Self::HelpPage(page) => help_page_to_link(page).into(),
Self::String(s) => s.into(),
}
}
fn anchor(&self) -> Cow<'_, str> {
fn anchor(&self) -> Cow<str> {
match *self {
Self::HelpPage(page) => help_page_link_suffix(page).into(),
Self::String(s) => s.split('#').next_back().unwrap_or_default().into(),

View file

@ -11,24 +11,6 @@ use snafu::ensure;
use snafu::ResultExt;
use snafu::Snafu;
#[derive(Debug)]
pub struct CodeDisplay(Option<i32>);
impl std::fmt::Display for CodeDisplay {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.0 {
Some(code) => write!(f, "{code}"),
None => write!(f, "?"),
}
}
}
impl From<Option<i32>> for CodeDisplay {
fn from(code: Option<i32>) -> Self {
CodeDisplay(code)
}
}
#[derive(Debug, Snafu)]
pub enum Error {
#[snafu(display("Failed to execute: {cmdline}"))]
@ -36,15 +18,8 @@ pub enum Error {
cmdline: String,
source: std::io::Error,
},
#[snafu(display("Failed to run ({code}): {cmdline}"))]
ReturnedError { cmdline: String, code: CodeDisplay },
#[snafu(display("Failed to run ({code}): {cmdline}: {stdout}{stderr}"))]
ReturnedWithOutputError {
cmdline: String,
code: CodeDisplay,
stdout: String,
stderr: String,
},
#[snafu(display("Failed with code {code:?}: {cmdline}"))]
ReturnedError { cmdline: String, code: Option<i32> },
#[snafu(display("Couldn't decode stdout/stderr as utf8"))]
InvalidUtf8 {
cmdline: String,
@ -96,36 +71,31 @@ impl CommandExt for Command {
status.success(),
ReturnedSnafu {
cmdline: get_cmdline(self),
code: CodeDisplay::from(status.code()),
code: status.code(),
}
);
Ok(self)
}
fn utf8_output(&mut self) -> Result<Utf8Output> {
let cmdline = get_cmdline(self);
let output = self.output().with_context(|_| DidNotExecuteSnafu {
cmdline: cmdline.clone(),
cmdline: get_cmdline(self),
})?;
let stdout = String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
cmdline: cmdline.clone(),
})?;
let stderr = String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
cmdline: cmdline.clone(),
})?;
ensure!(
output.status.success(),
ReturnedWithOutputSnafu {
cmdline,
code: CodeDisplay::from(output.status.code()),
stdout: stdout.clone(),
stderr: stderr.clone(),
ReturnedSnafu {
cmdline: get_cmdline(self),
code: output.status.code(),
}
);
Ok(Utf8Output { stdout, stderr })
Ok(Utf8Output {
stdout: String::from_utf8(output.stdout).with_context(|_| InvalidUtf8Snafu {
cmdline: get_cmdline(self),
})?,
stderr: String::from_utf8(output.stderr).with_context(|_| InvalidUtf8Snafu {
cmdline: get_cmdline(self),
})?,
})
}
fn ensure_spawn(&mut self) -> Result<std::process::Child> {
@ -165,10 +135,7 @@ mod test {
#[cfg(not(windows))]
assert!(matches!(
Command::new("false").ensure_success(),
Err(Error::ReturnedError {
code: CodeDisplay(_),
..
})
Err(Error::ReturnedError { code: Some(1), .. })
));
}
}

View file

@ -94,7 +94,7 @@ impl BackendCollectionService for Backend {
}
impl Backend {
pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
pub(super) fn lock_open_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
let guard = self.col.lock().unwrap();
guard
.is_some()
@ -102,7 +102,7 @@ impl Backend {
.ok_or(AnkiError::CollectionNotOpen)
}
pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<'_, Option<Collection>>> {
pub(super) fn lock_closed_collection(&self) -> Result<MutexGuard<Option<Collection>>> {
let guard = self.col.lock().unwrap();
guard
.is_none()

View file

@ -105,8 +105,7 @@ impl Card {
/// Returns true if the card has a due date in terms of days.
fn is_due_in_days(&self) -> bool {
self.ctype != CardType::New && self.original_or_current_due() <= 365_000 // keep consistent with SQL
|| matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
matches!(self.queue, CardQueue::DayLearn | CardQueue::Review)
|| (self.ctype == CardType::Review && self.is_undue_queue())
}
@ -126,20 +125,22 @@ impl Card {
}
}
/// If last_review_date isn't stored in the card, this uses card.due and
/// card.ivl to infer the elapsed time, which won't be accurate if
/// 'set due date' or an add-on has changed the due date.
pub(crate) fn seconds_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
/// This uses card.due and card.ivl to infer the elapsed time. If 'set due
/// date' or an add-on has changed the due date, this won't be accurate.
pub(crate) fn days_since_last_review(&self, timing: &SchedTimingToday) -> Option<u32> {
if let Some(last_review_time) = self.last_review_time {
Some(timing.now.elapsed_secs_since(last_review_time) as u32)
} else if self.is_due_in_days() {
Some(timing.next_day_at.elapsed_days_since(last_review_time) as u32)
} else if !self.is_due_in_days() {
Some(
(timing.next_day_at.0 as u32).saturating_sub(self.original_or_current_due() as u32)
/ 86_400,
)
} else {
self.due_time(timing).map(|due| {
(due.adding_secs(-86_400 * self.interval as i64)
.elapsed_secs()) as u32
.elapsed_secs()
/ 86_400) as u32
})
} else {
let last_review_time = TimestampSecs(self.original_or_current_due() as i64);
Some(timing.now.elapsed_secs_since(last_review_time) as u32)
}
}
}
@ -542,12 +543,12 @@ impl RowContext {
self.cards[0]
.memory_state
.as_ref()
.zip(self.cards[0].seconds_since_last_review(&self.timing))
.zip(self.cards[0].days_since_last_review(&self.timing))
.zip(Some(self.cards[0].decay.unwrap_or(FSRS5_DEFAULT_DECAY)))
.map(|((state, seconds), decay)| {
let r = FSRS::new(None).unwrap().current_retrievability_seconds(
.map(|((state, days_elapsed), decay)| {
let r = FSRS::new(None).unwrap().current_retrievability(
(*state).into(),
seconds,
days_elapsed,
decay,
);
format!("{:.0}%", r * 100.)

View file

@ -34,7 +34,7 @@ pub fn prettify_av_tags<S: Into<String> + AsRef<str>>(txt: S) -> String {
/// Parse `txt` into [CardNodes] and return the result,
/// or [None] if it only contains text nodes.
fn nodes_or_text_only(txt: &str) -> Option<CardNodes<'_>> {
fn nodes_or_text_only(txt: &str) -> Option<CardNodes> {
let nodes = CardNodes::parse(txt);
(!nodes.text_only).then_some(nodes)
}

View file

@ -103,13 +103,13 @@ fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
move |s| alt((is_not(arr), success(""))).parse(s)
}
fn node(s: &str) -> IResult<'_, Node<'_>> {
fn node(s: &str) -> IResult<Node> {
alt((sound_node, tag_node, text_node)).parse(s)
}
/// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or
/// video file.
fn sound_node(s: &str) -> IResult<'_, Node<'_>> {
fn sound_node(s: &str) -> IResult<Node> {
map(
delimited(tag("[sound:"), is_not("]"), tag("]")),
Node::SoundOrVideo,
@ -117,7 +117,7 @@ fn sound_node(s: &str) -> IResult<'_, Node<'_>> {
.parse(s)
}
fn take_till_potential_tag_start(s: &str) -> IResult<'_, &str> {
fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
// first char could be '[', but wasn't part of a node, so skip (eof ends parse)
let (after, offset) = anychar(s).map(|(s, c)| (s, c.len_utf8()))?;
Ok(match after.find('[') {
@ -127,9 +127,9 @@ fn take_till_potential_tag_start(s: &str) -> IResult<'_, &str> {
}
/// An Anki tag `[anki:tag...]...[/anki:tag]`.
fn tag_node(s: &str) -> IResult<'_, Node<'_>> {
fn tag_node(s: &str) -> IResult<Node> {
/// Match the start of an opening tag and return its name.
fn name(s: &str) -> IResult<'_, &str> {
fn name(s: &str) -> IResult<&str> {
preceded(tag("[anki:"), is_not("] \t\r\n")).parse(s)
}
@ -139,12 +139,12 @@ fn tag_node(s: &str) -> IResult<'_, Node<'_>> {
) -> impl FnMut(&'s str) -> IResult<'s, Vec<(&'s str, &'s str)>> + 'name {
/// List of whitespace-separated `key=val` tuples, where `val` may be
/// empty.
fn options(s: &str) -> IResult<'_, Vec<(&str, &str)>> {
fn key(s: &str) -> IResult<'_, &str> {
fn options(s: &str) -> IResult<Vec<(&str, &str)>> {
fn key(s: &str) -> IResult<&str> {
is_not("] \t\r\n=").parse(s)
}
fn val(s: &str) -> IResult<'_, &str> {
fn val(s: &str) -> IResult<&str> {
alt((
delimited(tag("\""), is_not0("\""), tag("\"")),
is_not0("] \t\r\n\""),
@ -197,7 +197,7 @@ fn tag_node(s: &str) -> IResult<'_, Node<'_>> {
.parse(s)
}
fn text_node(s: &str) -> IResult<'_, Node<'_>> {
fn text_node(s: &str) -> IResult<Node> {
map(take_till_potential_tag_start, Node::Text).parse(s)
}

View file

@ -54,8 +54,8 @@ enum Token<'a> {
}
/// Tokenize string
fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
fn open_cloze(text: &str) -> IResult<&str, Token> {
// opening brackets and 'c'
let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
// following number
@ -75,12 +75,12 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
Ok((text, Token::OpenCloze(digits)))
}
fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
fn close_cloze(text: &str) -> IResult<&str, Token> {
map(tag("}}"), |_| Token::CloseCloze).parse(text)
}
/// Match a run of text until an open/close marker is encountered.
fn normal_text(text: &str) -> IResult<&str, Token<'_>> {
fn normal_text(text: &str) -> IResult<&str, Token> {
if text.is_empty() {
return Err(nom::Err::Error(nom::error::make_error(
text,
@ -132,7 +132,7 @@ impl ExtractedCloze<'_> {
self.hint.unwrap_or("...")
}
fn clozed_text(&self) -> Cow<'_, str> {
fn clozed_text(&self) -> Cow<str> {
// happy efficient path?
if self.nodes.len() == 1 {
if let TextOrCloze::Text(text) = self.nodes.last().unwrap() {
@ -353,7 +353,7 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
.collect()
}
pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
let mut buf = String::new();
let mut active_cloze_found_in_text = false;
for node in &parse_text_with_clozes(text) {
@ -376,7 +376,7 @@ pub fn reveal_cloze_text(text: &str, cloze_ord: u16, question: bool) -> Cow<'_,
}
}
pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<'_, str> {
pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow<str> {
let mut output = Vec::new();
for node in &parse_text_with_clozes(text) {
reveal_cloze_text_in_nodes(node, cloze_ord, question, &mut output);
@ -384,7 +384,7 @@ pub fn reveal_cloze_text_only(text: &str, cloze_ord: u16, question: bool) -> Cow
output.join(", ").into()
}
pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<'_, str> {
pub fn extract_cloze_for_typing(text: &str, cloze_ord: u16) -> Cow<str> {
let mut output = Vec::new();
for node in &parse_text_with_clozes(text) {
reveal_cloze_text_in_nodes(node, cloze_ord, false, &mut output);
@ -460,7 +460,7 @@ pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> {
CLOZE.replace_all(text, "$1")
}
fn strip_html_inside_mathjax(text: &str) -> Cow<'_, str> {
fn strip_html_inside_mathjax(text: &str) -> Cow<str> {
MATHJAX.replace_all(text, |caps: &Captures| -> String {
format!(
"{}{}{}",

View file

@ -24,7 +24,6 @@ use crate::notetype::NotetypeId;
use crate::notetype::NotetypeKind;
use crate::prelude::*;
use crate::progress::ThrottlingProgressHandler;
use crate::storage::card::CardFixStats;
use crate::timestamp::TimestampMillis;
use crate::timestamp::TimestampSecs;
@ -41,7 +40,6 @@ pub struct CheckDatabaseOutput {
notetypes_recovered: usize,
invalid_utf8: usize,
invalid_ids: usize,
card_last_review_time_empty: usize,
}
#[derive(Debug, Clone, Copy, Default)]
@ -71,11 +69,6 @@ impl CheckDatabaseOutput {
if self.card_properties_invalid > 0 {
probs.push(tr.database_check_card_properties(self.card_properties_invalid));
}
if self.card_last_review_time_empty > 0 {
probs.push(
tr.database_check_card_last_review_time_empty(self.card_last_review_time_empty),
);
}
if self.cards_missing_note > 0 {
probs.push(tr.database_check_card_missing_note(self.cards_missing_note));
}
@ -165,25 +158,14 @@ impl Collection {
fn check_card_properties(&mut self, out: &mut CheckDatabaseOutput) -> Result<()> {
let timing = self.timing_today()?;
let CardFixStats {
new_cards_fixed,
other_cards_fixed,
last_review_time_fixed,
} = self.storage.fix_card_properties(
let (new_cnt, other_cnt) = self.storage.fix_card_properties(
timing.days_elapsed,
TimestampSecs::now(),
self.usn()?,
self.scheduler_version() == SchedulerVersion::V1,
)?;
out.card_position_too_high = new_cards_fixed;
out.card_properties_invalid += other_cards_fixed;
out.card_last_review_time_empty = last_review_time_fixed;
// Trigger one-way sync if last_review_time was updated to avoid conflicts
if last_review_time_fixed > 0 {
self.set_schema_modified()?;
}
out.card_position_too_high = new_cnt;
out.card_properties_invalid += other_cnt;
Ok(())
}

View file

@ -1,10 +1,6 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::collections::HashMap;
use anki_proto::generic;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
use crate::collection::Collection;
use crate::deckconfig::DeckConfSchema11;
@ -13,7 +9,6 @@ use crate::deckconfig::DeckConfigId;
use crate::deckconfig::UpdateDeckConfigsRequest;
use crate::error::Result;
use crate::scheduler::fsrs::params::ignore_revlogs_before_date_to_ms;
use crate::scheduler::fsrs::simulator::is_included_card;
impl crate::services::DeckConfigService for Collection {
fn add_or_update_deck_config_legacy(
@ -106,43 +101,68 @@ impl crate::services::DeckConfigService for Collection {
&mut self,
input: anki_proto::deck_config::GetRetentionWorkloadRequest,
) -> Result<anki_proto::deck_config::GetRetentionWorkloadResponse> {
let days_elapsed = self.timing_today().unwrap().days_elapsed as i32;
const LEARN_SPAN: usize = 100_000_000;
const TERMINATION_PROB: f32 = 0.001;
// the default values are from https://github.com/open-spaced-repetition/Anki-button-usage/blob/881009015c2a85ac911021d76d0aacb124849937/analysis.ipynb
const DEFAULT_LEARN_COST: f32 = 19.4698;
const DEFAULT_PASS_COST: f32 = 7.8454;
const DEFAULT_FAIL_COST: f32 = 23.185;
const DEFAULT_INITIAL_PASS_RATE: f32 = 0.7645;
let guard =
self.search_cards_into_table(&input.search, crate::search::SortMode::NoOrder)?;
let costs = guard.col.storage.get_costs_for_retention()?;
let revlogs = guard
.col
.storage
.get_revlog_entries_for_searched_cards_in_card_order()?;
fn smoothing(obs: f32, default: f32, count: u32) -> f32 {
let alpha = count as f32 / (50.0 + count as f32);
obs * alpha + default * (1.0 - alpha)
}
let mut config = guard.col.get_optimal_retention_parameters(revlogs)?;
let cards = guard
.col
.storage
.all_searched_cards()?
.into_iter()
.filter(is_included_card)
.filter_map(|c| crate::card::Card::convert(c.clone(), days_elapsed, c.memory_state?))
.collect::<Vec<fsrs::Card>>();
let cost_success = smoothing(
costs.average_pass_time_ms / 1000.0,
DEFAULT_PASS_COST,
costs.pass_count,
);
let cost_failure = smoothing(
costs.average_fail_time_ms / 1000.0,
DEFAULT_FAIL_COST,
costs.fail_count,
);
let cost_learn = smoothing(
costs.average_learn_time_ms / 1000.0,
DEFAULT_LEARN_COST,
costs.learn_count,
);
let initial_pass_rate = smoothing(
costs.initial_pass_rate,
DEFAULT_INITIAL_PASS_RATE,
costs.pass_count,
);
config.deck_size = guard.cards;
let costs = (70u32..=99u32)
.into_par_iter()
.map(|dr| {
Ok((
dr,
fsrs::expected_workload_with_existing_cards(
let before = fsrs::expected_workload(
&input.w,
dr as f32 / 100.,
&config,
&cards,
)?,
))
})
.collect::<Result<HashMap<_, _>>>()?;
input.before,
LEARN_SPAN,
cost_success,
cost_failure,
cost_learn,
initial_pass_rate,
TERMINATION_PROB,
)?;
let after = fsrs::expected_workload(
&input.w,
input.after,
LEARN_SPAN,
cost_success,
cost_failure,
cost_learn,
initial_pass_rate,
TERMINATION_PROB,
)?;
Ok(anki_proto::deck_config::GetRetentionWorkloadResponse { costs })
Ok(anki_proto::deck_config::GetRetentionWorkloadResponse {
factor: after / before,
})
}
}
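A quick numeric check of the smoothing used on the 25.08b1 side above: the observed average cost is weighted by count / (50 + count), so with no observations the published default is used unchanged, and at 50 observations the result sits exactly halfway between the observed value and the default. A self-contained sketch using the pass-cost default quoted in the hunk:

// Mirrors the smoothing fn in the hunk above; the constant is the
// DEFAULT_PASS_COST quoted there.
fn smoothing(obs: f32, default: f32, count: u32) -> f32 {
    let alpha = count as f32 / (50.0 + count as f32);
    obs * alpha + default * (1.0 - alpha)
}

fn main() {
    const DEFAULT_PASS_COST: f32 = 7.8454;
    // No observations: the default wins outright.
    assert_eq!(smoothing(10.0, DEFAULT_PASS_COST, 0), DEFAULT_PASS_COST);
    // 50 observations: alpha = 0.5, i.e. halfway between observed and default.
    let halfway = smoothing(10.0, DEFAULT_PASS_COST, 50);
    assert!((halfway - (10.0 + DEFAULT_PASS_COST) / 2.0).abs() < 1e-3);
}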

View file

@ -212,10 +212,10 @@ impl Collection {
if fsrs_toggled {
self.set_config_bool_inner(BoolKey::Fsrs, req.fsrs)?;
}
let mut deck_desired_retention: HashMap<DeckId, f32> = Default::default();
for deck in self.storage.get_all_decks()? {
if let Ok(normal) = deck.normal() {
let deck_id = deck.id;
// previous order & params
let previous_config_id = DeckConfigId(normal.config_id);
let previous_config = configs_before_update.get(&previous_config_id);
@ -223,23 +223,21 @@ impl Collection {
.map(|c| c.inner.new_card_insert_order())
.unwrap_or_default();
let previous_params = previous_config.map(|c| c.fsrs_params());
let previous_preset_dr = previous_config.map(|c| c.inner.desired_retention);
let previous_deck_dr = normal.desired_retention;
let previous_dr = previous_deck_dr.or(previous_preset_dr);
let previous_retention = previous_config.map(|c| c.inner.desired_retention);
let previous_easy_days = previous_config.map(|c| &c.inner.easy_days_percentages);
// if a selected (sub)deck, or its old config was removed, update deck to point
// to new config
let (current_config_id, current_deck_dr) = if selected_deck_ids.contains(&deck.id)
let current_config_id = if selected_deck_ids.contains(&deck.id)
|| !configs_after_update.contains_key(&previous_config_id)
{
let mut updated = deck.clone();
updated.normal_mut()?.config_id = selected_config.id.0;
update_deck_limits(updated.normal_mut()?, &req.limits, today);
self.update_deck_inner(&mut updated, deck, usn)?;
(selected_config.id, updated.normal()?.desired_retention)
selected_config.id
} else {
(previous_config_id, previous_deck_dr)
previous_config_id
};
// if new order differs, deck needs re-sorting
@ -253,12 +251,11 @@ impl Collection {
// if params differ, memory state needs to be recomputed
let current_params = current_config.map(|c| c.fsrs_params());
let current_preset_dr = current_config.map(|c| c.inner.desired_retention);
let current_dr = current_deck_dr.or(current_preset_dr);
let current_retention = current_config.map(|c| c.inner.desired_retention);
let current_easy_days = current_config.map(|c| &c.inner.easy_days_percentages);
if fsrs_toggled
|| previous_params != current_params
|| previous_dr != current_dr
|| previous_retention != current_retention
|| (req.fsrs_reschedule && previous_easy_days != current_easy_days)
{
decks_needing_memory_recompute
@ -266,9 +263,7 @@ impl Collection {
.or_default()
.push(deck_id);
}
if let Some(desired_retention) = current_deck_dr {
deck_desired_retention.insert(deck_id, desired_retention);
}
self.adjust_remaining_steps_in_deck(deck_id, previous_config, current_config, usn)?;
}
}
@ -282,11 +277,10 @@ impl Collection {
if req.fsrs {
Some(UpdateMemoryStateRequest {
params: c.fsrs_params().clone(),
preset_desired_retention: c.inner.desired_retention,
desired_retention: c.inner.desired_retention,
max_interval: c.inner.maximum_review_interval,
reschedule: req.fsrs_reschedule,
historical_retention: c.inner.historical_retention,
deck_desired_retention: deck_desired_retention.clone(),
})
} else {
None
@ -415,7 +409,6 @@ fn normal_deck_to_limits(deck: &NormalDeck, today: u32) -> Limits {
.new_limit_today
.map(|limit| limit.today == today)
.unwrap_or_default(),
desired_retention: deck.desired_retention,
}
}
@ -424,7 +417,6 @@ fn update_deck_limits(deck: &mut NormalDeck, limits: &Limits, today: u32) {
deck.new_limit = limits.new;
update_day_limit(&mut deck.review_limit_today, limits.review_today, today);
update_day_limit(&mut deck.new_limit_today, limits.new_today, today);
deck.desired_retention = limits.desired_retention;
}
fn update_day_limit(day_limit: &mut Option<DayLimit>, new_limit: Option<u32>, today: u32) {

View file

@ -31,7 +31,6 @@ pub(crate) use name::immediate_parent_name;
pub use name::NativeDeckName;
pub use schema11::DeckSchema11;
use crate::deckconfig::DeckConfig;
use crate::define_newtype;
use crate::error::FilteredDeckError;
use crate::markdown::render_markdown;
@ -90,16 +89,6 @@ impl Deck {
}
}
/// Get the effective desired retention value for a deck.
/// Returns deck-specific desired retention if available, otherwise falls
/// back to config default.
pub fn effective_desired_retention(&self, config: &DeckConfig) -> f32 {
self.normal()
.ok()
.and_then(|d| d.desired_retention)
.unwrap_or(config.inner.desired_retention)
}
// used by tests at the moment
#[allow(dead_code)]

View file

@ -191,7 +191,7 @@ fn invalid_char_for_deck_component(c: char) -> bool {
c.is_ascii_control()
}
fn normalized_deck_name_component(comp: &str) -> Cow<'_, str> {
fn normalized_deck_name_component(comp: &str) -> Cow<str> {
let mut out = normalize_to_nfc(comp);
if out.contains(invalid_char_for_deck_component) {
out = out.replace(invalid_char_for_deck_component, "").into();

View file

@ -135,8 +135,6 @@ pub struct NormalDeckSchema11 {
review_limit_today: Option<DayLimit>,
#[serde(default, deserialize_with = "default_on_invalid")]
new_limit_today: Option<DayLimit>,
#[serde(default, deserialize_with = "default_on_invalid")]
desired_retention: Option<u32>,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
@ -251,7 +249,6 @@ impl Default for NormalDeckSchema11 {
new_limit: None,
review_limit_today: None,
new_limit_today: None,
desired_retention: None,
}
}
}
@ -328,7 +325,6 @@ impl From<NormalDeckSchema11> for NormalDeck {
new_limit: deck.new_limit,
review_limit_today: deck.review_limit_today,
new_limit_today: deck.new_limit_today,
desired_retention: deck.desired_retention.map(|v| v as f32 / 100.0),
}
}
}
@ -370,7 +366,6 @@ impl From<Deck> for DeckSchema11 {
new_limit: norm.new_limit,
review_limit_today: norm.review_limit_today,
new_limit_today: norm.new_limit_today,
desired_retention: norm.desired_retention.map(|v| (v * 100.0) as u32),
common: deck.into(),
}),
DeckKind::Filtered(ref filt) => DeckSchema11::Filtered(FilteredDeckSchema11 {
@ -435,8 +430,7 @@ static RESERVED_DECK_KEYS: Set<&'static str> = phf_set! {
"browserCollapsed",
"extendRev",
"id",
"collapsed",
"desiredRetention",
"collapsed"
};
impl From<&Deck> for DeckTodaySchema11 {

View file

@ -231,10 +231,7 @@ fn svg_getter(notetypes: &[Notetype]) -> impl Fn(NotetypeId) -> bool {
}
impl Collection {
fn gather_notes(
&mut self,
search: impl TryIntoSearch,
) -> Result<(Vec<Note>, NoteTableGuard<'_>)> {
fn gather_notes(&mut self, search: impl TryIntoSearch) -> Result<(Vec<Note>, NoteTableGuard)> {
let guard = self.search_notes_into_table(search)?;
guard
.col
@ -243,7 +240,7 @@ impl Collection {
.map(|notes| (notes, guard))
}
fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard<'_>)> {
fn gather_cards(&mut self) -> Result<(Vec<Card>, CardTableGuard)> {
let guard = self.search_cards_of_notes_into_table()?;
guard
.col

View file

@ -664,7 +664,7 @@ mod test {
self
}
fn import(self, col: &mut Collection) -> NoteContext<'_> {
fn import(self, col: &mut Collection) -> NoteContext {
let mut progress_handler = col.new_progress_handler();
let media_map = Box::leak(Box::new(self.media_map));
let mut ctx = NoteContext::new(

View file

@ -154,7 +154,7 @@ pub(super) fn extract_media_entries(
}
}
pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<'_, str>> {
pub(super) fn safe_normalized_file_name(name: &str) -> Result<Cow<str>> {
if !filename_is_safe(name) {
Err(AnkiError::ImportError {
source: ImportError::Corrupt,

View file

@ -147,7 +147,7 @@ fn rendered_nodes_to_str(nodes: &[RenderedNode]) -> String {
.join("")
}
fn field_to_record_field(field: &str, with_html: bool) -> Cow<'_, str> {
fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
let mut text = strip_redundant_sections(field);
if !with_html {
text = text.map_cow(|t| html_to_text_line(t, false));
@ -155,7 +155,7 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow<'_, str> {
text
}
fn strip_redundant_sections(text: &str) -> Cow<'_, str> {
fn strip_redundant_sections(text: &str) -> Cow<str> {
static RE: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r"(?isx)
@ -169,7 +169,7 @@ fn strip_redundant_sections(text: &str) -> Cow<'_, str> {
RE.replace_all(text.as_ref(), "")
}
fn strip_answer_side_question(text: &str) -> Cow<'_, str> {
fn strip_answer_side_question(text: &str) -> Cow<str> {
static RE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
RE.replace_all(text.as_ref(), "")
@ -251,7 +251,7 @@ impl NoteContext {
.chain(self.tags(note))
}
fn notetype_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
fn notetype_name(&self, note: &Note) -> Option<Cow<[u8]>> {
self.with_notetype.then(|| {
self.notetypes
.get(&note.notetype_id)
@ -259,7 +259,7 @@ impl NoteContext {
})
}
fn deck_name(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
fn deck_name(&self, note: &Note) -> Option<Cow<[u8]>> {
self.with_deck.then(|| {
self.deck_ids
.get(&note.id)
@ -268,7 +268,7 @@ impl NoteContext {
})
}
fn tags(&self, note: &Note) -> Option<Cow<'_, [u8]>> {
fn tags(&self, note: &Note) -> Option<Cow<[u8]>> {
self.with_tags
.then(|| Cow::from(note.tags.join(" ").into_bytes()))
}

View file

@ -511,7 +511,7 @@ impl NoteContext<'_> {
}
impl Note {
fn first_field_stripped(&self) -> Cow<'_, str> {
fn first_field_stripped(&self) -> Cow<str> {
strip_html_preserving_media_filenames(&self.fields()[0])
}
}
@ -623,7 +623,7 @@ impl ForeignNote {
.all(|(opt, field)| opt.as_ref().map(|s| s == field).unwrap_or(true))
}
fn first_field_stripped(&self) -> Option<Cow<'_, str>> {
fn first_field_stripped(&self) -> Option<Cow<str>> {
self.fields
.first()
.and_then(|s| s.as_ref())

View file

@ -48,7 +48,7 @@ pub struct ExtractedLatex {
pub(crate) fn extract_latex_expanding_clozes(
text: &str,
svg: bool,
) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
) -> (Cow<str>, Vec<ExtractedLatex>) {
if text.contains("{{c") {
let expanded = expand_clozes_to_reveal_latex(text);
let (text, extracts) = extract_latex(&expanded, svg);
@ -60,7 +60,7 @@ pub(crate) fn extract_latex_expanding_clozes(
/// Extract LaTeX from the provided text.
/// Expects cloze deletions to already be expanded.
pub fn extract_latex(text: &str, svg: bool) -> (Cow<'_, str>, Vec<ExtractedLatex>) {
pub fn extract_latex(text: &str, svg: bool) -> (Cow<str>, Vec<ExtractedLatex>) {
let mut extracted = vec![];
let new_text = LATEX.replace_all(text, |caps: &Captures| {
@ -84,7 +84,7 @@ pub fn extract_latex(text: &str, svg: bool) -> (Cow<'_, str>, Vec<ExtractedLatex
(new_text, extracted)
}
fn strip_html_for_latex(html: &str) -> Cow<'_, str> {
fn strip_html_for_latex(html: &str) -> Cow<str> {
let mut out: Cow<str> = html.into();
if let Cow::Owned(o) = LATEX_NEWLINES.replace_all(html, "\n") {
out = o.into();

View file

@ -91,7 +91,7 @@ fn nonbreaking_space(char: char) -> bool {
/// - Any problem characters are removed.
/// - Windows device names like CON and PRN have '_' appended
/// - The filename is limited to 120 bytes.
pub(crate) fn normalize_filename(fname: &str) -> Cow<'_, str> {
pub(crate) fn normalize_filename(fname: &str) -> Cow<str> {
let mut output = Cow::Borrowed(fname);
if !is_nfc(output.as_ref()) {
@ -102,7 +102,7 @@ pub(crate) fn normalize_filename(fname: &str) -> Cow<'_, str> {
}
/// See normalize_filename(). This function expects NFC-normalized input.
pub(crate) fn normalize_nfc_filename(mut fname: Cow<'_, str>) -> Cow<'_, str> {
pub(crate) fn normalize_nfc_filename(mut fname: Cow<str>) -> Cow<str> {
if fname.contains(disallowed_char) {
fname = fname.replace(disallowed_char, "").into()
}
@ -137,7 +137,7 @@ pub(crate) fn normalize_nfc_filename(mut fname: Cow<'_, str>) -> Cow<'_, str> {
/// but can be accessed as NFC. On these devices, if the filename
/// is otherwise valid, the filename is returned as NFC.
#[allow(clippy::collapsible_else_if)]
pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<'_, str>> {
pub(crate) fn filename_if_normalized(fname: &str) -> Option<Cow<str>> {
if cfg!(target_vendor = "apple") {
if !is_nfc(fname) {
let as_nfc = fname.chars().nfc().collect::<String>();
@ -208,7 +208,7 @@ pub(crate) fn add_hash_suffix_to_file_stem(fname: &str, hash: &Sha1Hash) -> Stri
}
/// If filename is longer than max_bytes, truncate it.
fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<'_, str> {
fn truncate_filename(fname: &str, max_bytes: usize) -> Cow<str> {
if fname.len() <= max_bytes {
return Cow::Borrowed(fname);
}

View file

@ -87,7 +87,7 @@ impl TryFrom<anki_proto::notes::AddNoteRequest> for AddNoteRequest {
}
impl Collection {
pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<usize>> {
pub fn add_note(&mut self, note: &mut Note, did: DeckId) -> Result<OpOutput<()>> {
self.transact(Op::AddNote, |col| col.add_note_inner(note, did))
}
@ -372,7 +372,7 @@ impl Collection {
Ok(())
}
pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<usize> {
pub(crate) fn add_note_inner(&mut self, note: &mut Note, did: DeckId) -> Result<()> {
let nt = self
.get_notetype(note.notetype_id)?
.or_invalid("missing note type")?;
@ -383,11 +383,10 @@ impl Collection {
note.prepare_for_update(ctx.notetype, normalize_text)?;
note.set_modified(ctx.usn);
self.add_note_only_undoable(note)?;
let count = self.generate_cards_for_new_note(&ctx, note, did)?;
self.generate_cards_for_new_note(&ctx, note, did)?;
self.set_last_deck_for_notetype(note.notetype_id, did)?;
self.set_last_notetype_for_deck(did, note.notetype_id)?;
self.set_current_notetype_id(note.notetype_id)?;
Ok(count)
self.set_current_notetype_id(note.notetype_id)
}
pub fn update_note(&mut self, note: &mut Note) -> Result<OpOutput<()>> {

View file

@ -215,7 +215,7 @@ impl Collection {
ctx: &CardGenContext<impl Deref<Target = Notetype>>,
note: &Note,
target_deck_id: DeckId,
) -> Result<usize> {
) -> Result<()> {
self.generate_cards_for_note(
ctx,
note,
@ -231,8 +231,7 @@ impl Collection {
note: &Note,
) -> Result<()> {
let existing = self.storage.existing_cards_for_note(note.id)?;
self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())?;
Ok(())
self.generate_cards_for_note(ctx, note, &existing, ctx.last_deck, &mut Default::default())
}
fn generate_cards_for_note(
@ -242,13 +241,12 @@ impl Collection {
existing: &[AlreadyGeneratedCardInfo],
target_deck_id: Option<DeckId>,
cache: &mut CardGenCache,
) -> Result<usize> {
) -> Result<()> {
let cards = ctx.new_cards_required(note, existing, true);
if cards.is_empty() {
return Ok(0);
return Ok(());
}
self.add_generated_cards(note.id, &cards, target_deck_id, cache)?;
Ok(cards.len())
self.add_generated_cards(note.id, &cards, target_deck_id, cache)
}
pub(crate) fn generate_cards_for_notetype(

View file

@ -25,7 +25,7 @@ pub struct RenderCardOutput {
impl RenderCardOutput {
/// The question text. This is only valid to call when partial_render=false.
pub fn question(&self) -> Cow<'_, str> {
pub fn question(&self) -> Cow<str> {
match self.qnodes.as_slice() {
[RenderedNode::Text { text }] => text.into(),
_ => "not fully rendered".into(),
@ -33,7 +33,7 @@ impl RenderCardOutput {
}
/// The answer text. This is only valid to call when partial_render=false.
pub fn answer(&self) -> Cow<'_, str> {
pub fn answer(&self) -> Cow<str> {
match self.anodes.as_slice() {
[RenderedNode::Text { text }] => text.into(),
_ => "not fully rendered".into(),

Some files were not shown because too many files have changed in this diff.