Merge remote-tracking branch 'upstream/main' into svelte-reviewer-bottom

This commit is contained in:
Luc Mcgrady 2025-10-02 18:03:30 +01:00
commit b8678424cf
No known key found for this signature in database
GPG key ID: 4F3D7A0B17CC3D9C
64 changed files with 1109 additions and 295 deletions

View file

@ -16,6 +16,7 @@ if [ "$CLEAR_RUST" = "1" ]; then
rm -rf $BUILD_ROOT/rust
fi
rm -f out/build.ninja
./ninja pylib qt check
echo "--- Ensure libs importable"

View file

@ -1 +1 @@
25.08b5
25.09.2

View file

@ -1 +1,2 @@
nodeLinker: node-modules
enableScripts: false

View file

@ -49,6 +49,7 @@ Sander Santema <github.com/sandersantema/>
Thomas Brownback <https://github.com/brownbat/>
Andrew Gaul <andrew@gaul.org>
kenden
Emil Hamrin <github.com/e-hamrin>
Nickolay Yudin <kelciour@gmail.com>
neitrinoweb <github.com/neitrinoweb/>
Andreas Reis <github.com/nwwt>
@ -240,7 +241,12 @@ Thomas Rixen <thomas.rixen@student.uclouvain.be>
Siyuan Mattuwu Yan <syan4@ualberta.ca>
Lee Doughty <32392044+leedoughty@users.noreply.github.com>
memchr <memchr@proton.me>
Max Romanowski <maxr777@proton.me>
Aldlss <ayaldlss@gmail.com>
Hanna Nilsén <hanni614@student.liu.se>
Elias Johansson Lara <elias.johanssonlara@gmail.com>
Toby Penner <tobypenner01@gmail.com>
Danilo Spillebeen <spillebeendanilo@gmail.com>
********************

39
Cargo.lock generated
View file

@ -46,9 +46,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "ammonia"
version = "4.1.1"
version = "4.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6b346764dd0814805de8abf899fe03065bcee69bb1a4771c785817e39f3978f"
checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6"
dependencies = [
"cssparser",
"html5ever 0.35.0",
@ -3555,6 +3555,7 @@ dependencies = [
name = "launcher"
version = "1.0.0"
dependencies = [
"anki_i18n",
"anki_io",
"anki_process",
"anyhow",
@ -3563,6 +3564,7 @@ dependencies = [
"embed-resource",
"libc",
"libc-stdhandle",
"locale_config",
"serde_json",
"widestring",
"windows 0.61.3",
@ -3702,6 +3704,19 @@ version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed"
[[package]]
name = "locale_config"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d2c35b16f4483f6c26f0e4e9550717a2f6575bcd6f12a53ff0c490a94a6934"
dependencies = [
"lazy_static",
"objc",
"objc-foundation",
"regex",
"winapi",
]
[[package]]
name = "lock_api"
version = "0.4.13"
@ -4380,6 +4395,26 @@ dependencies = [
"malloc_buf",
]
[[package]]
name = "objc-foundation"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9"
dependencies = [
"block",
"objc",
"objc_id",
]
[[package]]
name = "objc_id"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b"
dependencies = [
"objc",
]
[[package]]
name = "object"
version = "0.36.7"

View file

@ -51,7 +51,7 @@ ninja_gen = { "path" = "build/ninja_gen" }
unicase = "=2.6.0" # any changes could invalidate sqlite indexes
# normal
ammonia = "4.1.0"
ammonia = "4.1.2"
anyhow = "1.0.98"
async-compression = { version = "0.4.24", features = ["zstd", "tokio"] }
async-stream = "0.3.6"
@ -92,6 +92,7 @@ itertools = "0.14.0"
junction = "1.2.0"
libc = "0.2"
libc-stdhandle = "0.1"
locale_config = "0.3.0"
maplit = "1.0.2"
nom = "8.0.0"
num-format = "0.4.4"

View file

@ -28,7 +28,11 @@ pub fn setup_yarn(args: YarnArgs) {
.arg("--ignore-scripts"),
);
} else {
run_command(Command::new(&args.yarn_bin).arg("install"));
run_command(
Command::new(&args.yarn_bin)
.arg("install")
.arg("--immutable"),
);
}
std::fs::write(args.stamp, b"").unwrap();

View file

@ -2226,7 +2226,7 @@
{
"authors": "Ibraheem Ahmed <ibraheem@ibraheem.ca>",
"description": "A high performance, zero-copy URL router.",
"license": "MIT AND BSD-3-Clause",
"license": "BSD-3-Clause AND MIT",
"license_file": null,
"name": "matchit",
"repository": "https://github.com/ibraheemdev/matchit"
@ -4154,7 +4154,7 @@
{
"authors": "David Tolnay <dtolnay@gmail.com>",
"description": "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31",
"license": "(MIT OR Apache-2.0) AND Unicode-3.0",
"license": "(Apache-2.0 OR MIT) AND Unicode-3.0",
"license_file": null,
"name": "unicode-ident",
"repository": "https://github.com/dtolnay/unicode-ident"

View file

@ -1,35 +1,78 @@
# This Dockerfile uses three stages.
# 1. Compile anki (and dependencies) and build python wheels.
# 2. Create a virtual environment containing anki and its dependencies.
# 3. Create a final image that only includes anki's virtual environment and required
# system packages.
# This is a user-contributed Dockerfile. No official support is available.
ARG PYTHON_VERSION="3.9"
ARG DEBIAN_FRONTEND="noninteractive"
# Build anki.
FROM python:$PYTHON_VERSION AS build
RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \
> /usr/local/bin/bazel \
&& chmod +x /usr/local/bin/bazel \
# Bazel expects /usr/bin/python
&& ln -s /usr/local/bin/python /usr/bin/python
FROM ubuntu:24.04 AS build
WORKDIR /opt/anki
COPY . .
# Build python wheels.
ENV PYTHON_VERSION="3.13"
# System deps
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
git \
build-essential \
pkg-config \
libssl-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
libffi-dev \
zlib1g-dev \
liblzma-dev \
ca-certificates \
ninja-build \
rsync \
libglib2.0-0 \
libgl1 \
libx11-6 \
libxext6 \
libxrender1 \
libxkbcommon0 \
libxkbcommon-x11-0 \
libxcb1 \
libxcb-render0 \
libxcb-shm0 \
libxcb-icccm4 \
libxcb-image0 \
libxcb-keysyms1 \
libxcb-randr0 \
libxcb-shape0 \
libxcb-xfixes0 \
libxcb-xinerama0 \
libxcb-xinput0 \
libsm6 \
libice6 \
&& rm -rf /var/lib/apt/lists/*
# install rust with rustup
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
# Install uv and Python 3.13 with uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh \
&& ln -s /root/.local/bin/uv /usr/local/bin/uv
ENV PATH="/root/.local/bin:${PATH}"
RUN uv python install ${PYTHON_VERSION} --default
COPY . .
RUN ./tools/build
# Install pre-compiled Anki.
FROM python:${PYTHON_VERSION}-slim as installer
FROM python:3.13-slim AS installer
WORKDIR /opt/anki/
COPY --from=build /opt/anki/wheels/ wheels/
COPY --from=build /opt/anki/out/wheels/ wheels/
# Use virtual environment.
RUN python -m venv venv \
&& ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \
&& ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl
# We use another build stage here so we don't include the wheels in the final image.
FROM python:${PYTHON_VERSION}-slim as final
FROM python:3.13-slim AS final
COPY --from=installer /opt/anki/venv /opt/anki/venv
ENV PATH=/opt/anki/venv/bin:$PATH
# Install run-time dependencies.
@ -59,9 +102,9 @@ RUN apt-get update \
libxrender1 \
libxtst6 \
&& rm -rf /var/lib/apt/lists/*
# Add non-root user.
RUN useradd --create-home anki
USER anki
WORKDIR /work
ENTRYPOINT ["/opt/anki/venv/bin/anki"]
LABEL maintainer="Jakub Kaczmarzyk <jakub.kaczmarzyk@gmail.com>"
ENTRYPOINT ["/opt/anki/venv/bin/anki"]

@ -1 +1 @@
Subproject commit 5897ef3a4589c123b7fa4c7fbd67f84d0b7ee13e
Subproject commit ec5e4cad6242e538cacf52265243668f0de5da80

View file

@ -498,6 +498,7 @@ deck-config-desired-retention-below-optimal = Your desired retention is below op
# cards that can be recalled or retrieved on a specific date.
deck-config-fsrs-simulator-experimental = FSRS Simulator (Experimental)
deck-config-fsrs-simulate-desired-retention-experimental = FSRS Desired Retention Simulator (Experimental)
deck-config-fsrs-simulate-save-preset = After optimizing, please save your deck preset before running the simulator.
deck-config-fsrs-desired-retention-help-me-decide-experimental = Help Me Decide (Experimental)
deck-config-additional-new-cards-to-simulate = Additional new cards to simulate
deck-config-simulate = Simulate

37
ftl/core/launcher.ftl Normal file
View file

@ -0,0 +1,37 @@
launcher-title = Anki Launcher
launcher-press-enter-to-start = Press enter to start Anki.
launcher-anki-will-start-shortly = Anki will start shortly.
launcher-you-can-close-this-window = You can close this window.
launcher-updating-anki = Updating Anki...
launcher-latest-anki = Latest Anki (just press Enter)
launcher-choose-a-version = Choose a version
launcher-sync-project-changes = Sync project changes
launcher-keep-existing-version = Keep existing version ({ $current })
launcher-revert-to-previous = Revert to previous version ({ $prev })
launcher-allow-betas = Allow betas: { $state }
launcher-on = on
launcher-off = off
launcher-cache-downloads = Cache downloads: { $state }
launcher-download-mirror = Download mirror: { $state }
launcher-uninstall = Uninstall
launcher-invalid-input = Invalid input. Please try again.
launcher-latest-releases = Latest releases: { $releases }
launcher-enter-the-version-you-want = Enter the version you want to install:
launcher-versions-before-cant-be-installed = Versions before 2.1.50 can't be installed.
launcher-invalid-version = Invalid version.
launcher-unable-to-check-for-versions = Unable to check for Anki versions. Please check your internet connection.
launcher-checking-for-updates = Checking for updates...
launcher-uninstall-confirm = Uninstall Anki's program files? (y/n)
launcher-uninstall-cancelled = Uninstall cancelled.
launcher-program-files-removed = Program files removed.
launcher-remove-all-profiles-confirm = Remove all profiles/cards? (y/n)
launcher-user-data-removed = User data removed.
launcher-download-mirror-options = Download mirror options:
launcher-mirror-no-mirror = No mirror
launcher-mirror-china = China
launcher-mirror-disabled = Mirror disabled.
launcher-mirror-china-enabled = China mirror enabled.
launcher-beta-releases-enabled = Beta releases enabled.
launcher-beta-releases-disabled = Beta releases disabled.
launcher-download-caching-enabled = Download caching enabled.
launcher-download-caching-disabled = Download caching disabled and cache cleared.

View file

@ -46,6 +46,20 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val }
studying-unbury = Unbury
studying-what-would-you-like-to-unbury = What would you like to unbury?
studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet.
studying-card-studied-in-minute =
{ $cards ->
[one] { $cards } card
*[other] { $cards } cards
} studied in
{ $minutes ->
[one] { $minutes } minute.
*[other] { $minutes } minutes.
}
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed
## OBSOLETE; you do not need to translate this
studying-card-studied-in =
{ $count ->
[one] { $count } card studied in
@ -56,5 +70,3 @@ studying-minute =
[one] { $count } minute.
*[other] { $count } minutes.
}
studying-question-time-elapsed = Question time elapsed
studying-answer-time-elapsed = Answer time elapsed

@ -1 +1 @@
Subproject commit dad4e2736a2b53dcdb52d79b5703dd464c05d666
Subproject commit 0b7c530233390d73b706f012bbe7489539925c7d

View file

@ -20,6 +20,7 @@ service CollectionService {
rpc LatestProgress(generic.Empty) returns (Progress);
rpc SetWantsAbort(generic.Empty) returns (generic.Empty);
rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges);
rpc GetCustomColours(generic.Empty) returns (GetCustomColoursResponse);
}
// Implicitly includes any of the above methods that are not listed in the
@ -163,3 +164,7 @@ message CreateBackupRequest {
bool force = 2;
bool wait_for_completion = 3;
}
message GetCustomColoursResponse {
repeated string colours = 1;
}

View file

@ -27,6 +27,9 @@ service FrontendService {
rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty);
// Warns python that the deck option web view is ready to receive requests.
rpc deckOptionsReady(generic.Empty) returns (generic.Empty);
// Save colour picker's custom colour palette
rpc SaveCustomColours(generic.Empty) returns (generic.Empty);
}
service BackendFrontendService {}

View file

@ -74,10 +74,15 @@ message SearchNode {
repeated SearchNode nodes = 1;
Joiner joiner = 2;
}
enum FieldSearchMode {
FIELD_SEARCH_MODE_NORMAL = 0;
FIELD_SEARCH_MODE_REGEX = 1;
FIELD_SEARCH_MODE_NOCOMBINING = 2;
}
message Field {
string field_name = 1;
string text = 2;
bool is_re = 3;
FieldSearchMode mode = 3;
}
oneof filter {

View file

@ -37,6 +37,8 @@ message CardStatsResponse {
uint32 ease = 5;
float taken_secs = 6;
optional cards.FsrsMemoryState memory_state = 7;
// seconds
uint32 last_interval = 8;
}
repeated StatsRevlogEntry revlog = 1;
int64 card_id = 2;

View file

@ -18,7 +18,7 @@ from anki._legacy import DeprecatedNamesMixinForModule
TR = anki._fluent.LegacyTranslationEnum
FormatTimeSpan = _pb.FormatTimespanRequest
# When adding new languages here, check lang_to_disk_lang() below
langs = sorted(
[
("Afrikaans", "af_ZA"),
@ -38,6 +38,7 @@ langs = sorted(
("Italiano", "it_IT"),
("lo jbobau", "jbo_EN"),
("Lenga d'òc", "oc_FR"),
("Қазақша", "kk_KZ"),
("Magyar", "hu_HU"),
("Nederlands", "nl_NL"),
("Norsk", "nb_NO"),
@ -64,6 +65,7 @@ langs = sorted(
("Українська мова", "uk_UA"),
("Հայերեն", "hy_AM"),
("עִבְרִית", "he_IL"),
("ייִדיש", "yi"),
("العربية", "ar_SA"),
("فارسی", "fa_IR"),
("ภาษาไทย", "th_TH"),
@ -104,6 +106,7 @@ compatMap = {
"it": "it_IT",
"ja": "ja_JP",
"jbo": "jbo_EN",
"kk": "kk_KZ",
"ko": "ko_KR",
"la": "la_LA",
"mn": "mn_MN",
@ -126,6 +129,7 @@ compatMap = {
"uk": "uk_UA",
"uz": "uz_UZ",
"vi": "vi_VN",
"yi": "yi",
}
@ -233,7 +237,7 @@ def get_def_lang(user_lang: str | None = None) -> tuple[int, str]:
def is_rtl(lang: str) -> bool:
return lang in ("he", "ar", "fa", "ug")
return lang in ("he", "ar", "fa", "ug", "yi")
# strip off unicode isolation markers from a translated string

View file

@ -32,6 +32,7 @@ def test_find_cards():
note = col.newNote()
note["Front"] = "cat"
note["Back"] = "sheep"
note.tags.append("conjunção größte")
col.addNote(note)
catCard = note.cards()[0]
m = col.models.current()
@ -68,6 +69,8 @@ def test_find_cards():
col.tags.bulk_remove(col.db.list("select id from notes"), "foo")
assert len(col.find_cards("tag:foo")) == 0
assert len(col.find_cards("tag:bar")) == 5
assert len(col.find_cards("tag:conjuncao tag:groste")) == 0
assert len(col.find_cards("tag:nc:conjuncao tag:nc:groste")) == 1
# text searches
assert len(col.find_cards("cat")) == 2
assert len(col.find_cards("cat -dog")) == 1

View file

@ -226,6 +226,7 @@ def show(mw: aqt.AnkiQt) -> QDialog:
"Anon_0000",
"Bilolbek Normuminov",
"Sagiv Marzini",
"Zhanibek Rassululy",
)
)

View file

@ -521,7 +521,7 @@ class Browser(QMainWindow):
self.search()
def current_search(self) -> str:
    # Return the text currently in the browser's search line edit.
    # Pasted text can contain newlines; collapse them to spaces so the
    # search string handed to the collection stays a single line.
    return self._line_edit().text().replace("\n", " ")
def search(self) -> None:
"""Search triggered programmatically. Caller must have saved note first."""

View file

@ -13,7 +13,7 @@ import aqt.browser
from anki.cards import Card
from anki.collection import Config
from anki.tags import MARKED_TAG
from aqt import AnkiQt, gui_hooks
from aqt import AnkiQt, gui_hooks, is_mac
from aqt.qt import (
QCheckBox,
QDialog,
@ -81,10 +81,15 @@ class Previewer(QDialog):
qconnect(self.finished, self._on_finished)
self.silentlyClose = True
self.vbox = QVBoxLayout()
spacing = 6
self.vbox.setContentsMargins(0, 0, 0, 0)
self.vbox.setSpacing(spacing)
self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER)
self.vbox.addWidget(self._web)
self.bbox = QDialogButtonBox()
self.bbox.setContentsMargins(
spacing, spacing if is_mac else 0, spacing, spacing
)
self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight)
gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER)

View file

@ -151,6 +151,7 @@ class Editor:
self.add_webview()
self.setupWeb()
self.setupShortcuts()
self.setupColourPalette()
gui_hooks.editor_did_init(self)
# Initial setup
@ -349,6 +350,14 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too
keys, fn, _ = row
QShortcut(QKeySequence(keys), self.widget, activated=fn) # type: ignore
def setupColourPalette(self) -> None:
    # Restore the user's saved custom colour palette (config key
    # "customColorPickerPalette") into Qt's shared QColorDialog custom
    # colour slots. Does nothing when the key is missing or empty.
    if not (colors := self.mw.col.get_config("customColorPickerPalette")):
        return
    # Qt exposes only a fixed number of custom slots; ignore any extras.
    for i, colour in enumerate(colors[: QColorDialog.customCount()]):
        # Skip entries Qt cannot parse as a colour name (e.g. bad config data).
        if not QColor.isValidColorName(colour):
            continue
        QColorDialog.setCustomColor(i, QColor.fromString(colour))
def _addFocusCheck(self, fn: Callable) -> Callable:
def checkFocus() -> None:
if self.currentField is None:

View file

@ -170,13 +170,42 @@ def favicon() -> Response:
def _mime_for_path(path: str) -> str:
"Mime type for provided path/filename."
if path.endswith(".css"):
# some users may have invalid mime type in the Windows registry
return "text/css"
elif path.endswith(".js") or path.endswith(".mjs"):
return "application/javascript"
_, ext = os.path.splitext(path)
ext = ext.lower()
# Badly-behaved apps on Windows can alter the standard mime types in the registry, which can completely
# break Anki's UI. So we hard-code the most common extensions.
mime_types = {
".css": "text/css",
".js": "application/javascript",
".mjs": "application/javascript",
".html": "text/html",
".htm": "text/html",
".svg": "image/svg+xml",
".png": "image/png",
".jpg": "image/jpeg",
".jpeg": "image/jpeg",
".gif": "image/gif",
".webp": "image/webp",
".ico": "image/x-icon",
".json": "application/json",
".woff": "font/woff",
".woff2": "font/woff2",
".ttf": "font/ttf",
".otf": "font/otf",
".mp3": "audio/mpeg",
".mp4": "video/mp4",
".webm": "video/webm",
".ogg": "audio/ogg",
".pdf": "application/pdf",
".txt": "text/plain",
}
if mime := mime_types.get(ext):
return mime
else:
# autodetect
# fallback to mimetypes, which may consult the registry
mime, _encoding = mimetypes.guess_type(path)
return mime or "application/octet-stream"
@ -600,6 +629,15 @@ def deck_options_ready() -> bytes:
return b""
def save_custom_colours() -> bytes:
    # Persist the current QColorDialog custom-colour slots into collection
    # config under "customColorPickerPalette", as #rrggbb hex strings.
    # Returns an empty body, as expected of handlers in post_handler_list.
    colors = [
        QColorDialog.customColor(i).name(QColor.NameFormat.HexRgb)
        for i in range(QColorDialog.customCount())
    ]
    aqt.mw.col.set_config("customColorPickerPalette", colors)
    return b""
post_handler_list = [
congrats_info,
get_deck_configs_for_update,
@ -615,12 +653,14 @@ post_handler_list = [
search_in_browser,
deck_options_require_close,
deck_options_ready,
save_custom_colours,
]
exposed_backend_list = [
# CollectionService
"latest_progress",
"get_custom_colours",
# DeckService
"get_deck_names",
# I18nService

View file

@ -17,6 +17,7 @@ import aqt.browser
import aqt.operations
from anki.cards import Card, CardId
from anki.collection import Config, OpChanges, OpChangesWithCount
from anki.lang import with_collapsed_whitespace
from anki.scheduler.base import ScheduleCardsAsNew
from anki.scheduler.v3 import (
CardAnswer,
@ -966,11 +967,15 @@ timerStopped = false;
elapsed = self.mw.col.timeboxReached()
if elapsed:
assert not isinstance(elapsed, bool)
part1 = tr.studying_card_studied_in(count=elapsed[1])
mins = int(round(elapsed[0] / 60))
part2 = tr.studying_minute(count=mins)
cards_val = elapsed[1]
minutes_val = int(round(elapsed[0] / 60))
message = with_collapsed_whitespace(
tr.studying_card_studied_in_minute(
cards=cards_val, minutes=str(minutes_val)
)
)
fin = tr.studying_finish()
diag = askUserDialog(f"{part1} {part2}", [tr.studying_continue(), fin])
diag = askUserDialog(message, [tr.studying_continue(), fin])
diag.setIcon(QMessageBox.Icon.Information)
if diag.run() == fin:
self.mw.moveToState("deckBrowser")

View file

@ -631,18 +631,44 @@ class QtAudioInputRecorder(Recorder):
self.mw = mw
self._parent = parent
from PyQt6.QtMultimedia import QAudioFormat, QAudioSource # type: ignore
from PyQt6.QtMultimedia import QAudioSource, QMediaDevices # type: ignore
format = QAudioFormat()
format.setChannelCount(2)
format.setSampleRate(44100)
format.setSampleFormat(QAudioFormat.SampleFormat.Int16)
# Get the default audio input device
device = QMediaDevices.defaultAudioInput()
source = QAudioSource(format, parent)
# Try to use Int16 format first (avoids conversion)
preferred_format = device.preferredFormat()
int16_format = preferred_format
int16_format.setSampleFormat(preferred_format.SampleFormat.Int16)
if device.isFormatSupported(int16_format):
# Use Int16 if supported
format = int16_format
else:
# Fall back to device's preferred format
format = preferred_format
# Create the audio source with the chosen format
source = QAudioSource(device, format, parent)
# Store the actual format being used
self._format = source.format()
self._audio_input = source
def _convert_float_to_int16(self, float_buffer: bytearray) -> bytes:
"""Convert float32 audio samples to int16 format for WAV output."""
import struct
float_count = len(float_buffer) // 4 # 4 bytes per float32
floats = struct.unpack(f"{float_count}f", float_buffer)
# Convert to int16 range, clipping and scaling in one step
int16_samples = [
max(-32768, min(32767, int(max(-1.0, min(1.0, f)) * 32767))) for f in floats
]
return struct.pack(f"{len(int16_samples)}h", *int16_samples)
def start(self, on_done: Callable[[], None]) -> None:
self._iodevice = self._audio_input.start()
self._buffer = bytearray()
@ -665,18 +691,32 @@ class QtAudioInputRecorder(Recorder):
return
def write_file() -> None:
# swallow the first 300ms to allow audio device to quiesce
wait = int(44100 * self.STARTUP_DELAY)
if len(self._buffer) <= wait:
return
self._buffer = self._buffer[wait:]
from PyQt6.QtMultimedia import QAudioFormat
# write out the wave file
# swallow the first 300ms to allow audio device to quiesce
bytes_per_frame = self._format.bytesPerFrame()
frames_to_skip = int(self._format.sampleRate() * self.STARTUP_DELAY)
bytes_to_skip = frames_to_skip * bytes_per_frame
if len(self._buffer) <= bytes_to_skip:
return
self._buffer = self._buffer[bytes_to_skip:]
# Check if we need to convert float samples to int16
if self._format.sampleFormat() == QAudioFormat.SampleFormat.Float:
audio_data = self._convert_float_to_int16(self._buffer)
sample_width = 2 # int16 is 2 bytes
else:
# For integer formats, use the data as-is
audio_data = bytes(self._buffer)
sample_width = self._format.bytesPerSample()
# write out the wave file with the correct format parameters
wf = wave.open(self.output_path, "wb")
wf.setnchannels(self._format.channelCount())
wf.setsampwidth(2)
wf.setsampwidth(sample_width)
wf.setframerate(self._format.sampleRate())
wf.writeframes(self._buffer)
wf.writeframes(audio_data)
wf.close()
def and_then(fut: Future) -> None:

View file

@ -180,7 +180,7 @@ class CustomStyles:
QPushButton {{
margin: 1px;
}}
QPushButton:focus {{
QPushButton:focus, QPushButton:default:hover {{
border: 2px solid {tm.var(colors.BORDER_FOCUS)};
outline: none;
margin: 0px;
@ -199,9 +199,6 @@ class CustomStyles:
)
};
}}
QPushButton:default:hover {{
border-width: 2px;
}}
QPushButton:pressed,
QPushButton:checked,
QSpinBox::up-button:pressed,

View file

@ -8,11 +8,13 @@ publish = false
rust-version.workspace = true
[dependencies]
anki_i18n.workspace = true
anki_io.workspace = true
anki_process.workspace = true
anyhow.workspace = true
camino.workspace = true
dirs.workspace = true
locale_config.workspace = true
serde_json.workspace = true
[target.'cfg(all(unix, not(target_os = "macos")))'.dependencies]

View file

@ -10,6 +10,7 @@ use std::process::Command;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use anki_i18n::I18n;
use anki_io::copy_file;
use anki_io::create_dir_all;
use anki_io::modified_time;
@ -31,6 +32,7 @@ use crate::platform::respawn_launcher;
mod platform;
struct State {
tr: I18n<anki_i18n::Launcher>,
current_version: Option<String>,
prerelease_marker: std::path::PathBuf,
uv_install_root: std::path::PathBuf,
@ -100,7 +102,14 @@ fn run() -> Result<()> {
let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?;
let locale = locale_config::Locale::user_default().to_string();
let mut state = State {
tr: I18n::new(&[if !locale.is_empty() {
locale
} else {
"en".to_owned()
}]),
current_version: None,
prerelease_marker: uv_install_root.join("prerelease"),
uv_install_root: uv_install_root.clone(),
@ -160,7 +169,7 @@ fn run() -> Result<()> {
}
print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
println!("\x1B[1mAnki Launcher\x1B[0m\n");
println!("\x1B[1m{}\x1B[0m\n", state.tr.launcher_title());
ensure_os_supported()?;
@ -178,15 +187,18 @@ fn run() -> Result<()> {
}
if cfg!(unix) && !cfg!(target_os = "macos") {
println!("\nPress enter to start Anki.");
println!("\n{}", state.tr.launcher_press_enter_to_start());
let mut input = String::new();
let _ = stdin().read_line(&mut input);
} else {
// on Windows/macOS, the user needs to close the terminal/console
// currently, but ideas on how we can avoid this would be good!
println!();
println!("Anki will start shortly.");
println!("\x1B[1mYou can close this window.\x1B[0m\n");
println!("{}", state.tr.launcher_anki_will_start_shortly());
println!(
"\x1B[1m{}\x1B[0m\n",
state.tr.launcher_you_can_close_this_window()
);
}
// respawn the launcher as a disconnected subprocess for normal startup
@ -258,7 +270,7 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
// Remove sync marker before attempting sync
let _ = remove_file(&state.sync_complete_marker);
println!("Updating Anki...\n");
println!("{}\n", state.tr.launcher_updating_anki());
let python_version_trimmed = if state.user_python_version_path.exists() {
let python_version = read_file(&state.user_python_version_path)?;
@ -378,10 +390,10 @@ fn main_menu_loop(state: &State) -> Result<()> {
// Toggle beta prerelease file
if state.prerelease_marker.exists() {
let _ = remove_file(&state.prerelease_marker);
println!("Beta releases disabled.");
println!("{}", state.tr.launcher_beta_releases_disabled());
} else {
write_file(&state.prerelease_marker, "")?;
println!("Beta releases enabled.");
println!("{}", state.tr.launcher_beta_releases_enabled());
}
println!();
continue;
@ -390,14 +402,14 @@ fn main_menu_loop(state: &State) -> Result<()> {
// Toggle cache disable file
if state.no_cache_marker.exists() {
let _ = remove_file(&state.no_cache_marker);
println!("Download caching enabled.");
println!("{}", state.tr.launcher_download_caching_enabled());
} else {
write_file(&state.no_cache_marker, "")?;
// Delete the cache directory and everything in it
if state.uv_cache_dir.exists() {
let _ = anki_io::remove_dir_all(&state.uv_cache_dir);
}
println!("Download caching disabled and cache cleared.");
println!("{}", state.tr.launcher_download_caching_disabled());
}
println!();
continue;
@ -440,44 +452,62 @@ fn file_timestamp_secs(path: &std::path::Path) -> i64 {
fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
loop {
println!("1) Latest Anki (press Enter)");
println!("2) Choose a version");
println!("1) {}", state.tr.launcher_latest_anki());
println!("2) {}", state.tr.launcher_choose_a_version());
if let Some(current_version) = &state.current_version {
let normalized_current = normalize_version(current_version);
if state.pyproject_modified_by_user {
println!("3) Sync project changes");
println!("3) {}", state.tr.launcher_sync_project_changes());
} else {
println!("3) Keep existing version ({normalized_current})");
println!(
"3) {}",
state.tr.launcher_keep_existing_version(normalized_current)
);
}
}
if let Some(prev_version) = &state.previous_version {
if state.current_version.as_ref() != Some(prev_version) {
let normalized_prev = normalize_version(prev_version);
println!("4) Revert to previous version ({normalized_prev})");
println!(
"4) {}",
state.tr.launcher_revert_to_previous(normalized_prev)
);
}
}
println!();
let betas_enabled = state.prerelease_marker.exists();
println!(
"5) Allow betas: {}",
if betas_enabled { "on" } else { "off" }
"5) {}",
state.tr.launcher_allow_betas(if betas_enabled {
state.tr.launcher_on()
} else {
state.tr.launcher_off()
})
);
let cache_enabled = !state.no_cache_marker.exists();
println!(
"6) Cache downloads: {}",
if cache_enabled { "on" } else { "off" }
"6) {}",
state.tr.launcher_cache_downloads(if cache_enabled {
state.tr.launcher_on()
} else {
state.tr.launcher_off()
})
);
let mirror_enabled = is_mirror_enabled(state);
println!(
"7) Download mirror: {}",
if mirror_enabled { "on" } else { "off" }
"7) {}",
state.tr.launcher_download_mirror(if mirror_enabled {
state.tr.launcher_on()
} else {
state.tr.launcher_off()
})
);
println!();
println!("8) Uninstall");
println!("8) {}", state.tr.launcher_uninstall());
print!("> ");
let _ = stdout().flush();
@ -499,7 +529,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
if state.current_version.is_some() {
MainMenuChoice::KeepExisting
} else {
println!("Invalid input. Please try again.\n");
println!("{}\n", state.tr.launcher_invalid_input());
continue;
}
}
@ -511,7 +541,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
}
}
}
println!("Invalid input. Please try again.\n");
println!("{}\n", state.tr.launcher_invalid_input());
continue;
}
"5" => MainMenuChoice::ToggleBetas,
@ -519,7 +549,7 @@ fn get_main_menu_choice(state: &State) -> Result<MainMenuChoice> {
"7" => MainMenuChoice::DownloadMirror,
"8" => MainMenuChoice::Uninstall,
_ => {
println!("Invalid input. Please try again.");
println!("{}\n", state.tr.launcher_invalid_input());
continue;
}
});
@ -534,9 +564,9 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
.map(|v| v.as_str())
.collect::<Vec<_>>()
.join(", ");
println!("Latest releases: {releases_str}");
println!("{}", state.tr.launcher_latest_releases(releases_str));
println!("Enter the version you want to install:");
println!("{}", state.tr.launcher_enter_the_version_you_want());
print!("> ");
let _ = stdout().flush();
@ -560,11 +590,11 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
Ok(Some(version_kind))
}
(None, true) => {
println!("Versions before 2.1.50 can't be installed.");
println!("{}", state.tr.launcher_versions_before_cant_be_installed());
Ok(None)
}
_ => {
println!("Invalid version.\n");
println!("{}\n", state.tr.launcher_invalid_version());
Ok(None)
}
}
@ -700,7 +730,7 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
let output = match cmd.utf8_output() {
Ok(output) => output,
Err(e) => {
print!("Unable to check for Anki versions. Please check your internet connection.\n\n");
print!("{}\n\n", state.tr.launcher_unable_to_check_for_versions());
return Err(e.into());
}
};
@ -709,7 +739,7 @@ fn fetch_versions(state: &State) -> Result<Vec<String>> {
}
fn get_releases(state: &State) -> Result<Releases> {
println!("Checking for updates...");
println!("{}", state.tr.launcher_checking_for_updates());
let include_prereleases = state.prerelease_marker.exists();
let all_versions = fetch_versions(state)?;
let all_versions = filter_and_normalize_versions(all_versions, include_prereleases);
@ -911,7 +941,7 @@ fn get_anki_addons21_path() -> Result<std::path::PathBuf> {
}
fn handle_uninstall(state: &State) -> Result<bool> {
println!("Uninstall Anki's program files? (y/n)");
println!("{}", state.tr.launcher_uninstall_confirm());
print!("> ");
let _ = stdout().flush();
@ -920,7 +950,7 @@ fn handle_uninstall(state: &State) -> Result<bool> {
let input = input.trim().to_lowercase();
if input != "y" {
println!("Uninstall cancelled.");
println!("{}", state.tr.launcher_uninstall_cancelled());
println!();
return Ok(false);
}
@ -928,11 +958,11 @@ fn handle_uninstall(state: &State) -> Result<bool> {
// Remove program files
if state.uv_install_root.exists() {
anki_io::remove_dir_all(&state.uv_install_root)?;
println!("Program files removed.");
println!("{}", state.tr.launcher_program_files_removed());
}
println!();
println!("Remove all profiles/cards? (y/n)");
println!("{}", state.tr.launcher_remove_all_profiles_confirm());
print!("> ");
let _ = stdout().flush();
@ -942,7 +972,7 @@ fn handle_uninstall(state: &State) -> Result<bool> {
if input == "y" && state.anki_base_folder.exists() {
anki_io::remove_dir_all(&state.anki_base_folder)?;
println!("User data removed.");
println!("{}", state.tr.launcher_user_data_removed());
}
println!();
@ -966,10 +996,13 @@ fn uv_command(state: &State) -> Result<Command> {
// remove UV_* environment variables to avoid interference
for (key, _) in std::env::vars() {
if key.starts_with("UV_") || key == "VIRTUAL_ENV" {
if key.starts_with("UV_") {
command.env_remove(key);
}
}
command
.env_remove("VIRTUAL_ENV")
.env_remove("SSLKEYLOGFILE");
// Add mirror environment variable if enabled
if let Some((python_mirror, pypi_mirror)) = get_mirror_urls(state)? {
@ -1003,6 +1036,7 @@ fn build_python_command(state: &State, args: &[String]) -> Result<Command> {
// Set UV and Python paths for the Python code
cmd.env("ANKI_LAUNCHER_UV", state.uv_path.utf8()?.as_str());
cmd.env("UV_PROJECT", state.uv_install_root.utf8()?.as_str());
cmd.env_remove("SSLKEYLOGFILE");
Ok(cmd)
}
@ -1032,9 +1066,9 @@ fn get_mirror_urls(state: &State) -> Result<Option<(String, String)>> {
fn show_mirror_submenu(state: &State) -> Result<()> {
loop {
println!("Download mirror options:");
println!("1) No mirror");
println!("2) China");
println!("{}", state.tr.launcher_download_mirror_options());
println!("1) {}", state.tr.launcher_mirror_no_mirror());
println!("2) {}", state.tr.launcher_mirror_china());
print!("> ");
let _ = stdout().flush();
@ -1048,14 +1082,14 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
if state.mirror_path.exists() {
let _ = remove_file(&state.mirror_path);
}
println!("Mirror disabled.");
println!("{}", state.tr.launcher_mirror_disabled());
break;
}
"2" => {
// Write China mirror URLs
let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/";
write_file(&state.mirror_path, china_mirrors)?;
println!("China mirror enabled.");
println!("{}", state.tr.launcher_mirror_china_enabled());
break;
}
"" => {
@ -1063,7 +1097,7 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
break;
}
_ => {
println!("Invalid input. Please try again.");
println!("{}", state.tr.launcher_invalid_input());
continue;
}
}

View file

@ -64,6 +64,7 @@ pub fn relaunch_in_terminal() -> Result<()> {
Command::new("open")
.args(["-na", "Terminal"])
.arg(current_exe)
.env_remove("ANKI_LAUNCHER_WANT_TERMINAL")
.ensure_spawn()?;
std::process::exit(0);
}

View file

@ -134,5 +134,8 @@ pub fn ensure_os_supported() -> Result<()> {
#[cfg(all(unix, not(target_os = "macos")))]
unix::ensure_glibc_supported()?;
#[cfg(target_os = "windows")]
windows::ensure_windows_version_supported()?;
Ok(())
}

View file

@ -38,6 +38,26 @@ fn is_windows_10() -> bool {
}
}
/// Ensures Windows 10 version 1809 or later
///
/// Queries the OS version via `RtlGetVersion` and succeeds when the
/// reported build number is at least 17763 (the Windows 10 1809 release).
/// Errors if the version query fails, or if the OS is older than that.
pub fn ensure_windows_version_supported() -> Result<()> {
    // SAFETY: `RtlGetVersion` only writes into `info`, and we set
    // `dwOSVersionInfoSize` to the struct's actual size as the API requires.
    unsafe {
        let mut info = OSVERSIONINFOW {
            dwOSVersionInfoSize: std::mem::size_of::<OSVERSIONINFOW>() as u32,
            ..Default::default()
        };

        if RtlGetVersion(&mut info).is_err() {
            anyhow::bail!("Failed to get Windows version information");
        }

        // Build 17763 corresponds to Windows 10 version 1809.
        if info.dwBuildNumber >= 17763 {
            return Ok(());
        }

        anyhow::bail!("Windows 10 version 1809 or later is required.")
    }
}
pub fn ensure_terminal_shown() -> Result<()> {
unsafe {
if !GetConsoleWindow().is_invalid() {

View file

@ -23,10 +23,10 @@ use write_strings::write_strings;
fn main() -> Result<()> {
// generate our own requirements
let map = get_ftl_data();
let mut map = get_ftl_data();
check(&map);
let modules = get_modules(&map);
write_strings(&map, &modules);
let mut modules = get_modules(&map);
write_strings(&map, &modules, "strings.rs", "All");
typescript::write_ts_interface(&modules)?;
python::write_py_interface(&modules)?;
@ -41,5 +41,12 @@ fn main() -> Result<()> {
write_file_if_changed(path, meta_json)?;
}
}
// generate strings for the launcher
map.iter_mut()
.for_each(|(_, modules)| modules.retain(|module, _| module == "launcher"));
modules.retain(|module| module.name == "launcher");
write_strings(&map, &modules, "strings_launcher.rs", "Launcher");
Ok(())
}

View file

@ -1,8 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
// Include auto-generated content
#![allow(clippy::all)]
#[derive(Clone)]
pub struct All;
// Include auto-generated content
include!(concat!(env!("OUT_DIR"), "/strings.rs"));
impl Translations for All {
const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS;
const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE;
}

View file

@ -0,0 +1,15 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

#![allow(clippy::all)]

/// Marker type selecting the launcher-only subset of the translations.
#[derive(Clone)]
pub struct Launcher;

// Include auto-generated content; provides `_STRINGS` and `_KEYS_BY_MODULE`,
// which the build script writes to strings_launcher.rs in OUT_DIR.
include!(concat!(env!("OUT_DIR"), "/strings_launcher.rs"));

// Expose the generated tables through the shared `Translations` trait so
// `I18n<Launcher>` can look up launcher strings.
impl Translations for Launcher {
    const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS;
    const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE;
}

View file

@ -2,8 +2,10 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
mod generated;
mod generated_launcher;
use std::borrow::Cow;
use std::marker::PhantomData;
use std::sync::Arc;
use std::sync::Mutex;
@ -12,8 +14,6 @@ use fluent::FluentArgs;
use fluent::FluentResource;
use fluent::FluentValue;
use fluent_bundle::bundle::FluentBundle as FluentBundleOrig;
use generated::KEYS_BY_MODULE;
use generated::STRINGS;
use num_format::Locale;
use serde::Serialize;
use unic_langid::LanguageIdentifier;
@ -22,6 +22,9 @@ type FluentBundle<T> = FluentBundleOrig<T, intl_memoizer::concurrent::IntlLangMe
pub use fluent::fluent_args as tr_args;
pub use crate::generated::All;
pub use crate::generated_launcher::Launcher;
pub trait Number: Into<FluentNumber> {
fn round(self) -> Self;
}
@ -187,20 +190,67 @@ fn get_bundle_with_extra(
get_bundle(text, extra_text, &locales)
}
pub trait Translations {
const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>>;
const KEYS_BY_MODULE: &[&[&str]];
}
#[derive(Clone)]
pub struct I18n {
pub struct I18n<P: Translations = All> {
inner: Arc<Mutex<I18nInner>>,
_translations_type: std::marker::PhantomData<P>,
}
fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
KEYS_BY_MODULE
.get(module_idx)
.and_then(|translations| translations.get(translation_idx))
.cloned()
.unwrap_or("invalid-module-or-translation-index")
}
impl<P: Translations> I18n<P> {
/// Resolve a (module, translation) index pair to its Fluent key,
/// falling back to a placeholder key when either index is out of range.
fn get_key(module_idx: usize, translation_idx: usize) -> &'static str {
    match P::KEYS_BY_MODULE
        .get(module_idx)
        .and_then(|keys| keys.get(translation_idx))
    {
        Some(&key) => key,
        None => "invalid-module-or-translation-index",
    }
}
/// For each requested language, concatenate the FTL text of the desired
/// modules (or of every module, when `desired_modules` is empty).
/// Languages not bundled in `P::STRINGS` yield an empty string.
fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
    let mut out = Vec::with_capacity(langs.len());
    for lang in langs {
        let mut combined = String::new();
        if let Some(strings) = P::STRINGS.get(remapped_lang_name(lang)) {
            if desired_modules.is_empty() {
                // An empty list requests every module.
                for text in strings.values() {
                    combined.push_str(text);
                }
            } else {
                for name in desired_modules {
                    if let Some(text) = strings.get(name.as_str()) {
                        combined.push_str(text);
                    }
                }
            }
        }
        out.push(combined);
    }
    out
}
/// This temporarily behaves like the older code; in the future we could
/// either access each &str separately, or load them on demand.
///
/// Returns the concatenated FTL text of every module for `lang`, or
/// `None` when the language is not bundled in `P::STRINGS`.
fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
    let name = remapped_lang_name(lang);
    P::STRINGS.get(name).map(|modules| {
        let mut text = String::new();
        for chunk in modules.values() {
            text.push_str(chunk);
        }
        text
    })
}
impl I18n {
pub fn template_only() -> Self {
Self::new::<&str>(&[])
}
@ -225,7 +275,7 @@ impl I18n {
let mut output_langs = vec![];
for lang in input_langs {
// if the language is bundled in the binary
if let Some(text) = ftl_localized_text(&lang).or_else(|| {
if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| {
// when testing, allow missing translations
if cfg!(test) {
Some(String::new())
@ -244,7 +294,7 @@ impl I18n {
// add English templates
let template_lang = "en-US".parse().unwrap();
let template_text = ftl_localized_text(&template_lang).unwrap();
let template_text = Self::ftl_localized_text(&template_lang).unwrap();
let template_bundle = get_bundle_with_extra(&template_text, None).unwrap();
bundles.push(template_bundle);
output_langs.push(template_lang);
@ -261,6 +311,7 @@ impl I18n {
bundles,
langs: output_langs,
})),
_translations_type: PhantomData,
}
}
@ -270,7 +321,7 @@ impl I18n {
message_index: usize,
args: FluentArgs,
) -> String {
let key = get_key(module_index, message_index);
let key = Self::get_key(module_index, message_index);
self.translate(key, Some(args)).into()
}
@ -305,7 +356,7 @@ impl I18n {
/// implementation.
pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript {
let inner = self.inner.lock().unwrap();
let resources = get_modules(&inner.langs, desired_modules);
let resources = Self::get_modules(&inner.langs, desired_modules);
ResourcesForJavascript {
langs: inner.langs.iter().map(ToString::to_string).collect(),
resources,
@ -313,47 +364,6 @@ impl I18n {
}
}
fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec<String> {
langs
.iter()
.cloned()
.map(|lang| {
let mut buf = String::new();
let lang_name = remapped_lang_name(&lang);
if let Some(strings) = STRINGS.get(lang_name) {
if desired_modules.is_empty() {
// empty list, provide all modules
for value in strings.values() {
buf.push_str(value)
}
} else {
for module_name in desired_modules {
if let Some(text) = strings.get(module_name.as_str()) {
buf.push_str(text);
}
}
}
}
buf
})
.collect()
}
/// This temporarily behaves like the older code; in the future we could either
/// access each &str separately, or load them on demand.
fn ftl_localized_text(lang: &LanguageIdentifier) -> Option<String> {
let lang = remapped_lang_name(lang);
if let Some(module) = STRINGS.get(lang) {
let mut text = String::new();
for module_text in module.values() {
text.push_str(module_text)
}
Some(text)
} else {
None
}
}
struct I18nInner {
// bundles in preferred language order, with template English as the
// last element
@ -490,7 +500,7 @@ mod test {
#[test]
fn i18n() {
// English template
let tr = I18n::new(&["zz"]);
let tr = I18n::<All>::new(&["zz"]);
assert_eq!(tr.translate("valid-key", None), "a valid key");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@ -513,7 +523,7 @@ mod test {
);
// Another language
let tr = I18n::new(&["ja_JP"]);
let tr = I18n::<All>::new(&["ja_JP"]);
assert_eq!(tr.translate("valid-key", None), "キー");
assert_eq!(tr.translate("only-in-english", None), "not translated");
assert_eq!(tr.translate("invalid-key", None), "invalid-key");
@ -524,7 +534,7 @@ mod test {
);
// Decimal separator
let tr = I18n::new(&["pl-PL"]);
let tr = I18n::<All>::new(&["pl-PL"]);
// Polish will use a comma if the string is translated
assert_eq!(
tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])),

View file

@ -15,7 +15,7 @@ use crate::extract::VariableKind;
use crate::gather::TranslationsByFile;
use crate::gather::TranslationsByLang;
pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) {
let mut buf = String::new();
// lang->module map
@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) {
// ordered list of translations by module
write_translation_key_index(modules, &mut buf);
// methods to generate messages
write_methods(modules, &mut buf);
write_methods(modules, &mut buf, tag);
let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
let path = dir.join("strings.rs");
let path = dir.join(out_fn);
fs::write(path, buf).unwrap();
}
fn write_methods(modules: &[Module], buf: &mut String) {
fn write_methods(modules: &[Module], buf: &mut String, tag: &str) {
buf.push_str(
r#"
use crate::{I18n,Number};
#[allow(unused_imports)]
use crate::{I18n,Number,Translations};
#[allow(unused_imports)]
use fluent::{FluentValue, FluentArgs};
use std::borrow::Cow;
impl I18n {
"#,
);
writeln!(buf, "impl I18n<{tag}> {{").unwrap();
for module in modules {
for translation in &module.translations {
let func = translation.key.to_snake_case();
@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
writeln!(
buf,
"pub(crate) const KEYS_BY_MODULE: [&[&str]; {count}] = [",
"pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [",
count = modules.len(),
)
.unwrap();
@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) {
fn write_lang_map(map: &TranslationsByLang, buf: &mut String) {
buf.push_str(
"
pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! {
",
);

View file

@ -10,6 +10,7 @@ use std::sync::LazyLock;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
use htmlescape::encode_attribute;
use itertools::Itertools;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;
@ -26,7 +27,7 @@ use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities;
static CLOZE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap());
static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
@ -48,7 +49,7 @@ mod mathjax_caps {
#[derive(Debug)]
enum Token<'a> {
// The parameter is the cloze number as is appears in the field content.
OpenCloze(u16),
OpenCloze(Vec<u16>),
Text(&'a str),
CloseCloze,
}
@ -58,21 +59,24 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token<'_>> {
fn open_cloze(text: &str) -> IResult<&str, Token<'_>> {
// opening brackets and 'c'
let (text, _opening_brackets_and_c) = tag("{{c")(text)?;
// following number
let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?;
let digits: u16 = match digits.parse() {
Ok(digits) => digits,
Err(_) => {
// not a valid number; fail to recognize
return Err(nom::Err::Error(nom::error::make_error(
text,
nom::error::ErrorKind::Digit,
)));
}
};
// following comma-seperated numbers
let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?;
let ordinals: Vec<u16> = ordinals
.split(',')
.filter_map(|s| s.parse().ok())
.collect::<HashSet<_>>() // deduplicate
.into_iter()
.sorted() // set conversion can de-order
.collect();
if ordinals.is_empty() {
return Err(nom::Err::Error(nom::error::make_error(
text,
nom::error::ErrorKind::Digit,
)));
}
// ::
let (text, _colons) = tag("::")(text)?;
Ok((text, Token::OpenCloze(digits)))
Ok((text, Token::OpenCloze(ordinals)))
}
fn close_cloze(text: &str) -> IResult<&str, Token<'_>> {
@ -121,11 +125,20 @@ enum TextOrCloze<'a> {
#[derive(Debug)]
struct ExtractedCloze<'a> {
// `ordinal` is the cloze number as is appears in the field content.
ordinal: u16,
ordinals: Vec<u16>,
nodes: Vec<TextOrCloze<'a>>,
hint: Option<&'a str>,
}
/// Generate a string representation of the ordinals for HTML
/// (comma-separated, e.g. "1,2,3"; empty input yields "").
fn ordinals_str(ordinals: &[u16]) -> String {
    let mut parts = Vec::with_capacity(ordinals.len());
    for ordinal in ordinals {
        parts.push(ordinal.to_string());
    }
    parts.join(",")
}
impl ExtractedCloze<'_> {
/// Return the cloze's hint, or "..." if none was provided.
fn hint(&self) -> &str {
@ -151,6 +164,11 @@ impl ExtractedCloze<'_> {
buf.into()
}
/// Checks if this cloze is active for a given ordinal
fn contains_ordinal(&self, ordinal: u16) -> bool {
    self.ordinals.iter().any(|&o| o == ordinal)
}
/// If cloze starts with image-occlusion:, return the text following that.
fn image_occlusion(&self) -> Option<&str> {
let TextOrCloze::Text(text) = self.nodes.first()? else {
@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec<TextOrCloze<'_>> {
let mut output = vec![];
for token in tokenize(text) {
match token {
Token::OpenCloze(ordinal) => {
Token::OpenCloze(ordinals) => {
if open_clozes.len() < 10 {
open_clozes.push(ExtractedCloze {
ordinal,
ordinals,
nodes: Vec::with_capacity(1), // common case
hint: None,
})
@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes(
output: &mut Vec<String>,
) {
if let TextOrCloze::Cloze(cloze) = node {
if cloze.ordinal == cloze_ord {
if cloze.contains_ordinal(cloze_ord) {
if question {
output.push(cloze.hint().into())
} else {
@ -234,14 +252,16 @@ fn reveal_cloze(
active_cloze_found_in_text: &mut bool,
buf: &mut String,
) {
let active = cloze.ordinal == cloze_ord;
let active = cloze.contains_ordinal(cloze_ord);
*active_cloze_found_in_text |= active;
if let Some(image_occlusion_text) = cloze.image_occlusion() {
buf.push_str(&render_image_occlusion(
image_occlusion_text,
question,
active,
cloze.ordinal,
cloze_ord,
&cloze.ordinals,
));
return;
}
@ -265,7 +285,7 @@ fn reveal_cloze(
buf,
r#"<span class="cloze" data-cloze="{}" data-ordinal="{}">[{}]</span>"#,
encode_attribute(&content_buf),
cloze.ordinal,
ordinals_str(&cloze.ordinals),
cloze.hint()
)
.unwrap();
@ -274,7 +294,7 @@ fn reveal_cloze(
write!(
buf,
r#"<span class="cloze" data-ordinal="{}">"#,
cloze.ordinal
ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@ -292,7 +312,7 @@ fn reveal_cloze(
write!(
buf,
r#"<span class="cloze-inactive" data-ordinal="{}">"#,
cloze.ordinal
ordinals_str(&cloze.ordinals)
)
.unwrap();
for node in &cloze.nodes {
@ -308,23 +328,29 @@ fn reveal_cloze(
}
}
fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String {
fn render_image_occlusion(
text: &str,
question_side: bool,
active: bool,
ordinal: u16,
ordinals: &[u16],
) -> String {
if (question_side && active) || ordinal == 0 {
format!(
r#"<div class="cloze" data-ordinal="{}" {}></div>"#,
ordinal,
ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !active {
format!(
r#"<div class="cloze-inactive" data-ordinal="{}" {}></div>"#,
ordinal,
ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else if !question_side && active {
format!(
r#"<div class="cloze-highlight" data-ordinal="{}" {}></div>"#,
ordinal,
ordinals_str(ordinals),
&get_image_cloze_data(text)
)
} else {
@ -338,7 +364,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec<ImageOcclusion> {
if let TextOrCloze::Cloze(cloze) = node {
if cloze.image_occlusion().is_some() {
if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) {
occlusions.entry(cloze.ordinal).or_default().push(shape);
// Associate this occlusion with all ordinals in this cloze
for &ordinal in &cloze.ordinals {
occlusions.entry(ordinal).or_default().push(shape.clone());
}
}
}
}
@ -420,7 +449,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String {
pub(crate) fn contains_cloze(text: &str) -> bool {
parse_text_with_clozes(text)
.iter()
.any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0))
.any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0)))
}
/// Returns the set of cloze number as they appear in the fields's content.
@ -433,10 +462,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet<u16> {
fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet<u16>) {
for node in nodes {
if let TextOrCloze::Cloze(cloze) = node {
if cloze.ordinal != 0 {
set.insert(cloze.ordinal);
add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
for &ordinal in &cloze.ordinals {
if ordinal != 0 {
set.insert(ordinal);
}
}
add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set);
}
}
}
@ -654,4 +685,160 @@ mod test {
)
);
}
#[test]
fn multi_card_card_generation() {
    // A single cloze listing several ordinals generates one card per ordinal.
    let expected: HashSet<u16> = HashSet::from([1, 2, 3]);
    assert_eq!(cloze_number_in_fields(vec!["{{c1,2,3::multi}}"]), expected);
}
#[test]
fn multi_card_cloze_basic() {
    let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}";

    // Question side: every cloze containing the ordinal is elided.
    let q1 = reveal_cloze_text(text, 1, true);
    assert_eq!(strip_html(&q1).as_ref(), "[...] word and [...] vs second");
    let q2 = reveal_cloze_text(text, 2, true);
    assert_eq!(strip_html(&q2).as_ref(), "[...] word and first vs [...]");

    // Answer side: everything is revealed for both cards.
    let a1 = reveal_cloze_text(text, 1, false);
    assert_eq!(strip_html(&a1).as_ref(), "shared word and first vs second");
    let a2 = reveal_cloze_text(text, 2, false);
    assert_eq!(strip_html(&a2).as_ref(), "shared word and first vs second");

    assert_eq!(cloze_numbers_in_string(text), HashSet::from([1, 2]));
}
#[test]
fn multi_card_cloze_html_attributes() {
    // Every card of a multi-ordinal cloze reports the full ordinal list.
    let text = "{{c1,2,3::multi}}";
    for card in 1..=3 {
        let html = reveal_cloze_text(text, card, true);
        assert!(html.contains(r#"data-ordinal="1,2,3""#));
    }
}
#[test]
fn multi_card_cloze_with_hints() {
    let text = "{{c1,2::answer::hint}}";
    for card in [1, 2] {
        // Both cards show the hint on the question side...
        let question = reveal_cloze_text(text, card, true);
        assert_eq!(strip_html(&question).as_ref(), "[hint]");
        // ...and the full answer on the back.
        let answer = reveal_cloze_text(text, card, false);
        assert_eq!(strip_html(&answer).as_ref(), "answer");
    }
}
#[test]
fn multi_card_cloze_edge_cases() {
    // Duplicate ordinals are collapsed.
    assert_eq!(
        cloze_numbers_in_string("{{c1,1,2::test}}"),
        HashSet::from([1, 2])
    );
    // Ordinal 0 is excluded from the card set.
    assert_eq!(
        cloze_numbers_in_string("{{c0,1,2::test}}"),
        HashSet::from([1, 2])
    );
    // Empty entries between commas are skipped.
    assert_eq!(
        cloze_numbers_in_string("{{c1,,3::test}}"),
        HashSet::from([1, 3])
    );
}
#[test]
fn multi_card_cloze_only_filter() {
    let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}";
    // (card, question side, answer side) expectations for the cloze-only filter.
    for (card, question, answer) in [
        (1, "..., ...", "shared, first"),
        (2, "..., ...", "shared, second"),
    ] {
        assert_eq!(reveal_cloze_text_only(text, card, true), question);
        assert_eq!(reveal_cloze_text_only(text, card, false), answer);
    }
}
#[test]
fn multi_card_nested_cloze() {
    let text = "{{c1,2::outer {{c3::inner}}}}";
    // Cards 1 and 2 hide the entire outer cloze.
    for card in [1, 2] {
        let html = reveal_cloze_text(text, card, true);
        assert_eq!(strip_html(&html).as_ref(), "[...]");
    }
    // Card 3 only hides the nested cloze.
    let html = reveal_cloze_text(text, 3, true);
    assert_eq!(strip_html(&html).as_ref(), "outer [...]");
    assert_eq!(cloze_numbers_in_string(text), HashSet::from([1, 2, 3]));
}
#[test]
fn nested_parent_child_card_same_cloze() {
    // A cloze nested inside another with the same ordinal collapses into
    // a single card.
    let text = "{{c1::outer {{c1::inner}}}}";
    let question = reveal_cloze_text(text, 1, true);
    assert_eq!(strip_html(&question).as_ref(), "[...]");
    assert_eq!(cloze_numbers_in_string(text), HashSet::from([1]));
}
#[test]
fn multi_card_image_occlusion() {
    let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}";
    // One occlusion entry is registered per ordinal in the cloze.
    let occlusions = parse_image_occlusions(text);
    assert_eq!(occlusions.len(), 2);
    assert!(occlusions.iter().any(|o| o.ordinal == 1));
    assert!(occlusions.iter().any(|o| o.ordinal == 2));
    // Rendered HTML advertises the full ordinal list on both cards.
    for card in [1, 2] {
        let html = reveal_cloze_text(text, card, true);
        assert!(html.contains(r#"data-ordinal="1,2""#));
    }
}
}

View file

@ -1,8 +1,10 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::collection::GetCustomColoursResponse;
use anki_proto::generic;
use crate::collection::Collection;
use crate::config::ConfigKey;
use crate::error;
use crate::prelude::BoolKey;
use crate::prelude::Op;
@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection {
})
.map(Into::into)
}
/// Returns the user's saved custom colour-picker palette, or an empty
/// palette when none has been stored in the collection config yet.
fn get_custom_colours(
    &mut self,
) -> error::Result<anki_proto::collection::GetCustomColoursResponse> {
    let colours = match self.get_config_optional(ConfigKey::CustomColorPickerPalette) {
        Some(saved) => saved,
        None => Default::default(),
    };
    Ok(GetCustomColoursResponse { colours })
}
}

View file

@ -71,6 +71,7 @@ pub(crate) enum ConfigKey {
NextNewCardPosition,
#[strum(to_string = "schedVer")]
SchedulerVersion,
CustomColorPickerPalette,
}
#[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)]

View file

@ -85,6 +85,15 @@ impl RevlogEntry {
.unwrap()
}
/// The absolute length of the previous interval, expressed in seconds.
///
/// NOTE(review): a positive `last_interval` appears to be a day count
/// (it is multiplied by 86 400 here), while a non-positive value appears
/// to already be a negated number of seconds — confirm against the
/// revlog schema. `saturating_mul` guards against overflow on both
/// branches, and the result is always non-negative, so the conversion
/// to u32 should not fail.
pub(crate) fn last_interval_secs(&self) -> u32 {
    u32::try_from(if self.last_interval > 0 {
        self.last_interval.saturating_mul(86_400)
    } else {
        self.last_interval.saturating_mul(-1)
    })
    .unwrap()
}
/// Returns true if this entry represents a reset operation.
/// These entries are created when a card is reset using
/// [`Collection::reschedule_cards_as_new`].

View file

@ -136,6 +136,19 @@ impl Collection {
let deckconfig_id = deck.config_id().unwrap();
// reschedule it
let original_interval = card.interval;
let min_interval = |interval: u32| {
let previous_interval =
last_info.previous_interval.unwrap_or(0);
if interval > previous_interval {
// interval grew; don't allow fuzzed interval to
// be less than previous+1
previous_interval + 1
} else {
// interval shrunk; don't restrict negative fuzz
0
}
.max(1)
};
let interval = fsrs.next_interval(
Some(state.stability),
desired_retention,
@ -146,7 +159,7 @@ impl Collection {
.and_then(|r| {
r.find_interval(
interval,
1,
min_interval(interval as u32),
req.max_interval,
days_elapsed as u32,
deckconfig_id,
@ -157,7 +170,7 @@ impl Collection {
with_review_fuzz(
card.get_fuzz_factor(true),
interval,
1,
min_interval(interval as u32),
req.max_interval,
)
});
@ -310,6 +323,9 @@ pub(crate) struct LastRevlogInfo {
/// reviewed the card and now, so that we can determine an accurate period
/// when the card has subsequently been rescheduled to a different day.
pub(crate) last_reviewed_at: Option<TimestampSecs>,
/// The interval before the latest review. Used to prevent fuzz from going
/// backwards when rescheduling the card
pub(crate) previous_interval: Option<u32>,
}
/// Return a map of cards to info about last review.
@ -321,14 +337,27 @@ pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap<CardId, L
.into_iter()
.for_each(|(card_id, group)| {
let mut last_reviewed_at = None;
let mut previous_interval = None;
for e in group.into_iter() {
if e.has_rating_and_affects_scheduling() {
last_reviewed_at = Some(e.id.as_secs());
previous_interval = if e.last_interval >= 0 && e.button_chosen > 1 {
Some(e.last_interval as u32)
} else {
None
};
} else if e.is_reset() {
last_reviewed_at = None;
previous_interval = None;
}
}
out.insert(card_id, LastRevlogInfo { last_reviewed_at });
out.insert(
card_id,
LastRevlogInfo {
last_reviewed_at,
previous_interval,
},
);
});
out
}

View file

@ -174,7 +174,7 @@ impl Collection {
}
}
let health_check_passed = if health_check {
let health_check_passed = if health_check && input.train_set.len() > 300 {
let fsrs = FSRS::new(None)?;
fsrs.evaluate_with_time_series_splits(input, |_| true)
.ok()
@ -478,27 +478,42 @@ pub(crate) fn reviews_for_fsrs(
}))
.collect_vec();
let skip = if training { 1 } else { 0 };
// Convert the remaining entries into separate FSRSItems, where each item
// contains all reviews done until then.
let items: Vec<(RevlogId, FSRSItem)> = entries
.iter()
.enumerate()
.skip(skip)
.map(|(outer_idx, entry)| {
let reviews = entries
.iter()
.take(outer_idx + 1)
.enumerate()
.map(|(inner_idx, r)| FSRSReview {
rating: r.button_chosen as u32,
delta_t: delta_ts[inner_idx],
})
.collect();
(entry.id, FSRSItem { reviews })
})
.filter(|(_, item)| !training || item.reviews.last().unwrap().delta_t > 0)
.collect_vec();
let items = if training {
// Convert the remaining entries into separate FSRSItems, where each item
// contains all reviews done until then.
let mut items = Vec::with_capacity(entries.len());
let mut current_reviews = Vec::with_capacity(entries.len());
for (idx, (entry, &delta_t)) in entries.iter().zip(delta_ts.iter()).enumerate() {
current_reviews.push(FSRSReview {
rating: entry.button_chosen as u32,
delta_t,
});
if idx >= 1 && delta_t > 0 {
items.push((
entry.id,
FSRSItem {
reviews: current_reviews.clone(),
},
));
}
}
items
} else {
// When not training, we only need the final FSRS item, which represents
// the complete history of the card. This avoids expensive clones in a loop.
let reviews = entries
.iter()
.zip(delta_ts.iter())
.map(|(entry, &delta_t)| FSRSReview {
rating: entry.button_chosen as u32,
delta_t,
})
.collect();
let last_entry = entries.last().unwrap();
vec![(last_entry.id, FSRSItem { reviews })]
};
if items.is_empty() {
None
} else {
@ -738,7 +753,7 @@ pub(crate) mod tests {
],
false,
),
fsrs_items!([review(0)], [review(0), review(1)])
fsrs_items!([review(0), review(1)])
);
}
@ -809,7 +824,7 @@ pub(crate) mod tests {
// R | A X R
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9)),
fsrs_items!([review(0)], [review(0), review(2)])
fsrs_items!([review(0), review(2)])
);
}
@ -828,6 +843,9 @@ pub(crate) mod tests {
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9))
.unwrap()
.last()
.unwrap()
.reviews
.len(),
2
);
@ -849,6 +867,9 @@ pub(crate) mod tests {
assert_eq!(
convert_ignore_before(revlogs, false, days_ago_ms(9))
.unwrap()
.last()
.unwrap()
.reviews
.len(),
2
);

View file

@ -115,13 +115,14 @@ impl Rescheduler {
pub fn find_interval(
&self,
interval: f32,
minimum: u32,
maximum: u32,
minimum_interval: u32,
maximum_interval: u32,
days_elapsed: u32,
deckconfig_id: DeckConfigId,
fuzz_seed: Option<u64>,
) -> Option<u32> {
let (before_days, after_days) = constrained_fuzz_bounds(interval, minimum, maximum);
let (before_days, after_days) =
constrained_fuzz_bounds(interval, minimum_interval, maximum_interval);
// Don't reschedule the card when it's overdue
if after_days < days_elapsed {

View file

@ -6,6 +6,7 @@ use std::mem;
use itertools::Itertools;
use super::writer::write_nodes;
use super::FieldSearchMode;
use super::Node;
use super::SearchNode;
use super::StateKind;
@ -174,7 +175,7 @@ impl SearchNode {
pub fn from_tag_name(name: &str) -> Self {
Self::Tag {
tag: escape_anki_wildcards_for_search_node(name),
is_re: false,
mode: FieldSearchMode::Normal,
}
}

View file

@ -13,6 +13,7 @@ pub use builder::JoinSearches;
pub use builder::Negated;
pub use builder::SearchBuilder;
pub use parser::parse as parse_search;
pub use parser::FieldSearchMode;
pub use parser::Node;
pub use parser::PropertyKind;
pub use parser::RatingKind;

View file

@ -3,6 +3,7 @@
use std::sync::LazyLock;
use anki_proto::search::search_node::FieldSearchMode as FieldSearchModeProto;
use nom::branch::alt;
use nom::bytes::complete::escaped;
use nom::bytes::complete::is_not;
@ -27,7 +28,6 @@ use crate::error::ParseError;
use crate::error::Result;
use crate::error::SearchErrorKind as FailKind;
use crate::prelude::*;
type IResult<'a, O> = std::result::Result<(&'a str, O), nom::Err<ParseError<'a>>>;
type ParseResult<'a, O> = std::result::Result<O, nom::Err<ParseError<'a>>>;
@ -48,6 +48,23 @@ pub enum Node {
Search(SearchNode),
}
/// How the text of a field or tag search should be interpreted.
/// Chosen from the `re:`/`nc:` prefixes during search parsing.
#[derive(Copy, Debug, PartialEq, Eq, Clone)]
pub enum FieldSearchMode {
    /// Literal match (no prefix).
    Normal,
    /// `re:` prefix — treat the text as a regular expression.
    Regex,
    /// `nc:` prefix — NOTE(review): name suggests matching with Unicode
    /// combining characters ignored; confirm in the SQL writer.
    NoCombining,
}
/// Convert the protobuf wire enum into the internal search enum.
/// The two enums are kept in one-to-one correspondence; the exhaustive
/// match will fail to compile if a new proto variant is added.
impl From<FieldSearchModeProto> for FieldSearchMode {
    fn from(mode: FieldSearchModeProto) -> Self {
        match mode {
            FieldSearchModeProto::Normal => Self::Normal,
            FieldSearchModeProto::Regex => Self::Regex,
            FieldSearchModeProto::Nocombining => Self::NoCombining,
        }
    }
}
#[derive(Debug, PartialEq, Clone)]
pub enum SearchNode {
// text without a colon
@ -56,7 +73,7 @@ pub enum SearchNode {
SingleField {
field: String,
text: String,
is_re: bool,
mode: FieldSearchMode,
},
AddedInDays(u32),
EditedInDays(u32),
@ -77,7 +94,7 @@ pub enum SearchNode {
},
Tag {
tag: String,
is_re: bool,
mode: FieldSearchMode,
},
Duplicates {
notetype_id: NotetypeId,
@ -373,12 +390,17 @@ fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> {
Ok(if let Some(re) = s.strip_prefix("re:") {
SearchNode::Tag {
tag: unescape_quotes(re),
is_re: true,
mode: FieldSearchMode::Regex,
}
} else if let Some(nc) = s.strip_prefix("nc:") {
SearchNode::Tag {
tag: unescape(nc)?,
mode: FieldSearchMode::NoCombining,
}
} else {
SearchNode::Tag {
tag: unescape(s)?,
is_re: false,
mode: FieldSearchMode::Normal,
}
})
}
@ -670,13 +692,19 @@ fn parse_single_field<'a>(key: &'a str, val: &'a str) -> ParseResult<'a, SearchN
SearchNode::SingleField {
field: unescape(key)?,
text: unescape_quotes(stripped),
is_re: true,
mode: FieldSearchMode::Regex,
}
} else if let Some(stripped) = val.strip_prefix("nc:") {
SearchNode::SingleField {
field: unescape(key)?,
text: unescape_quotes(stripped),
mode: FieldSearchMode::NoCombining,
}
} else {
SearchNode::SingleField {
field: unescape(key)?,
text: unescape(val)?,
is_re: false,
mode: FieldSearchMode::Normal,
}
})
}
@ -806,7 +834,7 @@ mod test {
Search(SingleField {
field: "foo".into(),
text: "bar baz".into(),
is_re: false,
mode: FieldSearchMode::Normal,
})
]))),
Or,
@ -819,7 +847,16 @@ mod test {
vec![Search(SingleField {
field: "foo".into(),
text: "bar".into(),
is_re: true
mode: FieldSearchMode::Regex,
})]
);
assert_eq!(
parse("foo:nc:bar")?,
vec![Search(SingleField {
field: "foo".into(),
text: "bar".into(),
mode: FieldSearchMode::NoCombining,
})]
);
@ -829,7 +866,7 @@ mod test {
vec![Search(SingleField {
field: "field".into(),
text: "va\"lue".into(),
is_re: false
mode: FieldSearchMode::Normal,
})]
);
assert_eq!(parse(r#""field:va\"lue""#)?, parse(r#"field:"va\"lue""#)?,);
@ -906,14 +943,14 @@ mod test {
parse("tag:hard")?,
vec![Search(Tag {
tag: "hard".into(),
is_re: false
mode: FieldSearchMode::Normal
})]
);
assert_eq!(
parse(r"tag:re:\\")?,
vec![Search(Tag {
tag: r"\\".into(),
is_re: true
mode: FieldSearchMode::Regex
})]
);
assert_eq!(

View file

@ -6,6 +6,7 @@ use itertools::Itertools;
use crate::prelude::*;
use crate::search::parse_search;
use crate::search::FieldSearchMode;
use crate::search::Negated;
use crate::search::Node;
use crate::search::PropertyKind;
@ -40,7 +41,7 @@ impl TryFrom<anki_proto::search::SearchNode> for Node {
Filter::FieldName(s) => Node::Search(SearchNode::SingleField {
field: escape_anki_wildcards_for_search_node(&s),
text: "_*".to_string(),
is_re: false,
mode: FieldSearchMode::Normal,
}),
Filter::Rated(rated) => Node::Search(SearchNode::Rated {
days: rated.days,
@ -107,7 +108,7 @@ impl TryFrom<anki_proto::search::SearchNode> for Node {
Filter::Field(field) => Node::Search(SearchNode::SingleField {
field: escape_anki_wildcards(&field.field_name),
text: escape_anki_wildcards(&field.text),
is_re: field.is_re,
mode: field.mode().into(),
}),
Filter::LiteralText(text) => {
let text = escape_anki_wildcards(&text);

View file

@ -7,6 +7,7 @@ use std::ops::Range;
use itertools::Itertools;
use super::parser::FieldSearchMode;
use super::parser::Node;
use super::parser::PropertyKind;
use super::parser::RatingKind;
@ -138,8 +139,8 @@ impl SqlWriter<'_> {
false,
)?
}
SearchNode::SingleField { field, text, is_re } => {
self.write_field(&norm(field), &self.norm_note(text), *is_re)?
SearchNode::SingleField { field, text, mode } => {
self.write_field(&norm(field), &self.norm_note(text), *mode)?
}
SearchNode::Duplicates { notetype_id, text } => {
self.write_dupe(*notetype_id, &self.norm_note(text))?
@ -180,7 +181,7 @@ impl SqlWriter<'_> {
SearchNode::Notetype(notetype) => self.write_notetype(&norm(notetype)),
SearchNode::Rated { days, ease } => self.write_rated(">", -i64::from(*days), ease)?,
SearchNode::Tag { tag, is_re } => self.write_tag(&norm(tag), *is_re),
SearchNode::Tag { tag, mode } => self.write_tag(&norm(tag), *mode),
SearchNode::State(state) => self.write_state(state)?,
SearchNode::Flag(flag) => {
write!(self.sql, "(c.flags & 7) == {flag}").unwrap();
@ -296,8 +297,8 @@ impl SqlWriter<'_> {
Ok(())
}
fn write_tag(&mut self, tag: &str, is_re: bool) {
if is_re {
fn write_tag(&mut self, tag: &str, mode: FieldSearchMode) {
if mode == FieldSearchMode::Regex {
self.args.push(format!("(?i){tag}"));
write!(self.sql, "regexp_tags(?{}, n.tags)", self.args.len()).unwrap();
} else {
@ -310,8 +311,19 @@ impl SqlWriter<'_> {
}
s if s.contains(' ') => write!(self.sql, "false").unwrap(),
text => {
write!(self.sql, "n.tags regexp ?").unwrap();
let re = &to_custom_re(text, r"\S");
let text = if mode == FieldSearchMode::Normal {
write!(self.sql, "n.tags regexp ?").unwrap();
Cow::from(text)
} else {
write!(
self.sql,
"coalesce(process_text(n.tags, {}), n.tags) regexp ?",
ProcessTextFlags::NoCombining.bits()
)
.unwrap();
without_combining(text)
};
let re = &to_custom_re(&text, r"\S");
self.args.push(format!("(?i).* {re}(::| ).*"));
}
}
@ -567,16 +579,18 @@ impl SqlWriter<'_> {
}
}
fn write_field(&mut self, field_name: &str, val: &str, is_re: bool) -> Result<()> {
fn write_field(&mut self, field_name: &str, val: &str, mode: FieldSearchMode) -> Result<()> {
if matches!(field_name, "*" | "_*" | "*_") {
if is_re {
if mode == FieldSearchMode::Regex {
self.write_all_fields_regexp(val);
} else {
self.write_all_fields(val);
}
Ok(())
} else if is_re {
} else if mode == FieldSearchMode::Regex {
self.write_single_field_regexp(field_name, val)
} else if mode == FieldSearchMode::NoCombining {
self.write_single_field_nc(field_name, val)
} else {
self.write_single_field(field_name, val)
}
@ -592,6 +606,58 @@ impl SqlWriter<'_> {
write!(self.sql, "regexp_fields(?{}, n.flds)", self.args.len()).unwrap();
}
fn write_single_field_nc(&mut self, field_name: &str, val: &str) -> Result<()> {
let field_indicies_by_notetype = self.num_fields_and_fields_indices_by_notetype(
field_name,
matches!(val, "*" | "_*" | "*_"),
)?;
if field_indicies_by_notetype.is_empty() {
write!(self.sql, "false").unwrap();
return Ok(());
}
let val = to_sql(val);
let val = without_combining(&val);
self.args.push(val.into());
let arg_idx = self.args.len();
let field_idx_str = format!("' || ?{arg_idx} || '");
let other_idx_str = "%".to_string();
let notetype_clause = |ctx: &FieldQualifiedSearchContext| -> String {
let field_index_clause = |range: &Range<u32>| {
let f = (0..ctx.total_fields_in_note)
.filter_map(|i| {
if i as u32 == range.start {
Some(&field_idx_str)
} else if range.contains(&(i as u32)) {
None
} else {
Some(&other_idx_str)
}
})
.join("\x1f");
format!(
"coalesce(process_text(n.flds, {}), n.flds) like '{f}' escape '\\'",
ProcessTextFlags::NoCombining.bits()
)
};
let all_field_clauses = ctx
.field_ranges_to_search
.iter()
.map(field_index_clause)
.join(" or ");
format!("(n.mid = {mid} and ({all_field_clauses}))", mid = ctx.ntid)
};
let all_notetype_clauses = field_indicies_by_notetype
.iter()
.map(notetype_clause)
.join(" or ");
write!(self.sql, "({all_notetype_clauses})").unwrap();
Ok(())
}
fn write_single_field_regexp(&mut self, field_name: &str, val: &str) -> Result<()> {
let field_indicies_by_notetype = self.fields_indices_by_notetype(field_name)?;
if field_indicies_by_notetype.is_empty() {
@ -1116,6 +1182,20 @@ mod test {
vec!["(?i)te.*st".into()]
)
);
// field search with no-combine
assert_eq!(
s(ctx, "front:nc:frânçais"),
(
concat!(
"(((n.mid = 1581236385344 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
"(n.mid = 1581236385345 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%\u{1f}%' escape '\\')) or ",
"(n.mid = 1581236385346 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\')) or ",
"(n.mid = 1581236385347 and (coalesce(process_text(n.flds, 1), n.flds) like '' || ?1 || '\u{1f}%' escape '\\'))))"
)
.into(),
vec!["francais".into()]
)
);
// all field search
assert_eq!(
s(ctx, "*:te*st"),

View file

@ -9,6 +9,7 @@ use regex::Regex;
use crate::notetype::NotetypeId as NotetypeIdType;
use crate::prelude::*;
use crate::search::parser::parse;
use crate::search::parser::FieldSearchMode;
use crate::search::parser::Node;
use crate::search::parser::PropertyKind;
use crate::search::parser::RatingKind;
@ -69,7 +70,7 @@ fn write_search_node(node: &SearchNode) -> String {
use SearchNode::*;
match node {
UnqualifiedText(s) => maybe_quote(&s.replace(':', "\\:")),
SingleField { field, text, is_re } => write_single_field(field, text, *is_re),
SingleField { field, text, mode } => write_single_field(field, text, *mode),
AddedInDays(u) => format!("added:{u}"),
EditedInDays(u) => format!("edited:{u}"),
IntroducedInDays(u) => format!("introduced:{u}"),
@ -81,7 +82,7 @@ fn write_search_node(node: &SearchNode) -> String {
NotetypeId(NotetypeIdType(i)) => format!("mid:{i}"),
Notetype(s) => maybe_quote(&format!("note:{s}")),
Rated { days, ease } => write_rated(days, ease),
Tag { tag, is_re } => write_single_field("tag", tag, *is_re),
Tag { tag, mode } => write_single_field("tag", tag, *mode),
Duplicates { notetype_id, text } => write_dupe(notetype_id, text),
State(k) => write_state(k),
Flag(u) => format!("flag:{u}"),
@ -116,14 +117,25 @@ fn needs_quotation(txt: &str) -> bool {
}
/// Also used by tag search, which has the same syntax.
fn write_single_field(field: &str, text: &str, is_re: bool) -> String {
let re = if is_re { "re:" } else { "" };
let text = if !is_re && text.starts_with("re:") {
fn write_single_field(field: &str, text: &str, mode: FieldSearchMode) -> String {
let prefix = match mode {
FieldSearchMode::Normal => "",
FieldSearchMode::Regex => "re:",
FieldSearchMode::NoCombining => "nc:",
};
let text = if mode == FieldSearchMode::Normal
&& (text.starts_with("re:") || text.starts_with("nc:"))
{
text.replacen(':', "\\:", 1)
} else {
text.to_string()
};
maybe_quote(&format!("{}:{}{}", field.replace(':', "\\:"), re, &text))
maybe_quote(&format!(
"{}:{}{}",
field.replace(':', "\\:"),
prefix,
&text
))
}
fn write_template(template: &TemplateKind) -> String {

View file

@ -76,8 +76,15 @@ impl Collection {
note_id: card.note_id.into(),
deck: deck.human_name(),
added: card.id.as_secs().0,
first_review: revlog.first().map(|entry| entry.id.as_secs().0),
latest_review: revlog.last().map(|entry| entry.id.as_secs().0),
first_review: revlog
.iter()
.find(|entry| entry.has_rating())
.map(|entry| entry.id.as_secs().0),
// last_review_time is not used to ensure cram revlogs are included.
latest_review: revlog
.iter()
.rfind(|entry| entry.has_rating())
.map(|entry| entry.id.as_secs().0),
due_date: self.due_date(&card)?,
due_position: self.position(&card),
interval: card.interval,
@ -220,6 +227,7 @@ fn stats_revlog_entry(
ease: entry.ease_factor,
taken_secs: entry.taken_millis as f32 / 1000.,
memory_state: None,
last_interval: entry.last_interval_secs(),
}
}

View file

@ -255,9 +255,7 @@ fn check_for_unstaged_changes() {
}
fn generate_licences() -> Result<String> {
if which::which("cargo-license").is_err() {
Command::run("cargo install cargo-license@0.5.1")?;
}
Command::run("cargo install cargo-license@0.7.0")?;
let output = Command::run_with_output([
"cargo-license",
"--features",

View file

@ -4,6 +4,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
<script lang="ts">
import Shortcut from "$lib/components/Shortcut.svelte";
import { saveCustomColours } from "@generated/backend";
export let keyCombination: string | null = null;
export let value: string;
@ -11,7 +12,15 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
let inputRef: HTMLInputElement;
</script>
<input bind:this={inputRef} tabindex="-1" type="color" bind:value on:input on:change />
<input
bind:this={inputRef}
tabindex="-1"
type="color"
bind:value
on:input
on:change
on:click={() => saveCustomColours({})}
/>
{#if keyCombination}
<Shortcut {keyCombination} on:action={() => inputRef.click()} />

View file

@ -19,6 +19,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import ColorPicker from "./ColorPicker.svelte";
import { context as editorToolbarContext } from "./EditorToolbar.svelte";
import WithColorHelper from "./WithColorHelper.svelte";
import { saveCustomColours } from "@generated/backend";
export let color: string;
@ -134,7 +135,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
color = setColor(event);
bridgeCommand(`lastHighlightColor:${color}`);
}}
on:change={() => setTextColor()}
on:change={() => {
setTextColor();
saveCustomColours({});
}}
/>
</IconButton>
</WithColorHelper>

View file

@ -22,6 +22,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import ColorPicker from "./ColorPicker.svelte";
import { context as editorToolbarContext } from "./EditorToolbar.svelte";
import WithColorHelper from "./WithColorHelper.svelte";
import { saveCustomColours } from "@generated/backend";
export let color: string;
@ -158,6 +159,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
setTimeout(() => {
setTextColor();
}, 200);
saveCustomColours({});
}}
/>
</IconButton>

View file

@ -23,6 +23,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
export let title: string;
export let url: string;
export let linkLabel: string | undefined = undefined;
export let startIndex = 0;
export let helpSections: HelpItem[];
export let fsrs = false;
@ -106,11 +107,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
<div class="chapter-redirect">
{@html renderMarkdown(
tr.helpForMoreInfo({
link: `<a href="${url}" title="${tr.helpOpenManualChapter(
{
name: title,
},
)}">${title}</a>`,
link: `<a href="${url}" title="${tr.helpOpenManualChapter({ name: linkLabel ?? title })}">${linkLabel ?? title}</a>`,
}),
)}
</div>

View file

@ -27,7 +27,8 @@ export const HelpPage = {
limitsFromTop: "https://docs.ankiweb.net/deck-options.html#limits-start-from-top",
dailyLimits: "https://docs.ankiweb.net/deck-options.html#daily-limits",
audio: "https://docs.ankiweb.net/deck-options.html#audio",
fsrs: "http://docs.ankiweb.net/deck-options.html#fsrs",
fsrs: "https://docs.ankiweb.net/deck-options.html#fsrs",
desiredRetention: "https://docs.ankiweb.net/deck-options.html#desired-retention",
},
Leeches: {
leeches: "https://docs.ankiweb.net/leeches.html#leeches",

View file

@ -10,6 +10,9 @@ export function allImagesLoaded(): Promise<void[]> {
}
function imageLoaded(img: HTMLImageElement): Promise<void> {
if (!img.getAttribute("decoding")) {
img.decoding = "async";
}
return img.complete
? Promise.resolve()
: new Promise((resolve) => {

View file

@ -53,6 +53,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
let desiredRetentionFocused = false;
let desiredRetentionEverFocused = false;
let optimized = false;
const initialParams = [...fsrsParams($config)];
$: if (desiredRetentionFocused) {
desiredRetentionEverFocused = true;
}
@ -338,6 +339,14 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
state.save(UpdateDeckConfigsMode.COMPUTE_ALL_PARAMS);
}
function showSimulatorModal(modal: Modal) {
if (fsrsParams($config).toString() === initialParams.toString()) {
modal?.show();
} else {
alert(tr.deckConfigFsrsSimulateSavePreset());
}
}
let simulatorModal: Modal;
let workloadModal: Modal;
</script>
@ -368,7 +377,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
class="btn btn-primary"
on:click={() => {
simulateFsrsRequest.reviewLimit = 9999;
workloadModal?.show();
showSimulatorModal(workloadModal);
}}
>
{tr.deckConfigFsrsDesiredRetentionHelpMeDecideExperimental()}
@ -455,7 +464,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
<hr />
<div class="m-1">
<button class="btn btn-primary" on:click={() => simulatorModal?.show()}>
<button class="btn btn-primary" on:click={() => showSimulatorModal(simulatorModal)}>
{tr.deckConfigFsrsSimulatorExperimental()}
</button>
</div>

View file

@ -72,7 +72,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
>
<HelpModal
title={tr.statisticsTrueRetentionTitle()}
url={HelpPage.DeckOptions.fsrs}
url={HelpPage.DeckOptions.desiredRetention}
linkLabel={tr.deckConfigDesiredRetention()}
{helpSections}
on:mount={(e) => {
modal = e.detail.modal;

View file

@ -32,6 +32,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
saveNeededStore,
opacityStateStore,
} from "./store";
import { get } from "svelte/store";
import { drawEllipse, drawPolygon, drawRectangle, drawText } from "./tools/index";
import { makeMaskTransparent, SHAPE_MASK_COLOR } from "./tools/lib";
import { enableSelectable, stopDraw } from "./tools/lib";
@ -55,6 +56,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
onWheelDragX,
} from "./tools/tool-zoom";
import { fillMask } from "./tools/tool-fill";
import { getCustomColours, saveCustomColours } from "@generated/backend";
export let canvas;
export let iconSize;
@ -76,6 +78,16 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
let colourRef: HTMLInputElement | undefined;
const colour = writable(SHAPE_MASK_COLOR);
const customColorPickerPalette = writable<string[]>([]);
async function loadCustomColours() {
customColorPickerPalette.set(
(await getCustomColours({})).colours.filter(
(hex) => !hex.startsWith("#ffffff"),
),
);
}
function onClick(event: MouseEvent) {
const upperCanvas = document.querySelector(".upper-canvas");
if (event.target == upperCanvas) {
@ -222,7 +234,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
}
onMount(() => {
opacityStateStore.set(maskOpacity);
maskOpacity = get(opacityStateStore);
removeHandlers = singleCallback(
on(document, "click", onClick),
on(window, "mousemove", onMousemove),
@ -233,6 +245,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
on(document, "touchstart", onTouchstart),
on(document, "mousemove", onMousemoveDocument),
);
loadCustomColours();
});
onDestroy(() => {
@ -241,7 +254,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
</script>
<datalist id="colour-palette">
<option value={SHAPE_MASK_COLOR}></option>
<option>{SHAPE_MASK_COLOR}</option>
{#each $customColorPickerPalette as colour}
<option>{colour}</option>
{/each}
</datalist>
<input
@ -251,6 +267,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
list="colour-palette"
value={SHAPE_MASK_COLOR}
on:input={(e) => ($colour = e.currentTarget!.value)}
on:change={() => saveCustomColours({})}
/>
<div class="tool-bar-container" style:--fill-tool-colour={$colour}>

View file

@ -8,10 +8,22 @@ import { fabric } from "fabric";
import { get } from "svelte/store";
import { optimumCssSizeForCanvas } from "./canvas-scale";
import { hideAllGuessOne, notesDataStore, saveNeededStore, tagsWritable, textEditingState } from "./store";
import {
hideAllGuessOne,
notesDataStore,
opacityStateStore,
saveNeededStore,
tagsWritable,
textEditingState,
} from "./store";
import Toast from "./Toast.svelte";
import { addShapesToCanvasFromCloze } from "./tools/add-from-cloze";
import { enableSelectable, makeShapesRemainInCanvas, moveShapeToCanvasBoundaries } from "./tools/lib";
import {
enableSelectable,
makeMaskTransparent,
makeShapesRemainInCanvas,
moveShapeToCanvasBoundaries,
} from "./tools/lib";
import { modifiedPolygon } from "./tools/tool-polygon";
import { undoStack } from "./tools/tool-undo-redo";
import { enablePinchZoom, onResize, setCanvasSize } from "./tools/tool-zoom";
@ -83,6 +95,7 @@ export const setupMaskEditorForEdit = async (
window.requestAnimationFrame(() => {
onImageLoaded({ noteId: BigInt(noteId) });
});
if (get(opacityStateStore)) { makeMaskTransparent(canvas, true); }
};
return canvas;

View file

@ -6939,8 +6939,8 @@ __metadata:
linkType: hard
"vite@npm:6":
version: 6.3.5
resolution: "vite@npm:6.3.5"
version: 6.3.6
resolution: "vite@npm:6.3.6"
dependencies:
esbuild: "npm:^0.25.0"
fdir: "npm:^6.4.4"
@ -6989,7 +6989,7 @@ __metadata:
optional: true
bin:
vite: bin/vite.js
checksum: 10c0/df70201659085133abffc6b88dcdb8a57ef35f742a01311fc56a4cfcda6a404202860729cc65a2c401a724f6e25f9ab40ce4339ed4946f550541531ced6fe41c
checksum: 10c0/add701f1e72596c002275782e38d0389ab400c1be330c93a3009804d62db68097a936ca1c53c3301df3aaacfe5e328eab547060f31ef9c49a277ae50df6ad4fb
languageName: node
linkType: hard