diff --git a/.version b/.version
index a38238a29..3135abe4c 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-25.06b5
+25.06b6
diff --git a/CLAUDE.md b/CLAUDE.md
index 6ec6db642..3be5cc70b 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -21,7 +21,7 @@ Please do this as a final step before marking a task as completed.
 During development, you can build/check subsections of our code:
 
 - Rust: 'cargo check'
-- Python: './tools/dmypy'
+- Python: './tools/dmypy', and if wheel-related, './ninja wheels'
 - TypeScript/Svelte: './ninja check:svelte'
 
 Be mindful that some changes (such as modifications to .proto files) may
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
index d334540fb..fc3bc44e6 100644
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -63,6 +63,7 @@ Jakub Kaczmarzyk
 Akshara Balachandra
 lukkea
 David Allison
+David Allison <62114487+david-allison@users.noreply.github.com>
 Tsung-Han Yu
 Piotr Kubowicz
 RumovZ
diff --git a/Cargo.lock b/Cargo.lock
index 03f9e63c8..04e7c6c76 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3543,6 +3543,7 @@ dependencies = [
  "anki_io",
  "anki_process",
  "anyhow",
+ "camino",
  "dirs 6.0.0",
  "embed-resource",
  "libc",
diff --git a/Cargo.toml b/Cargo.toml
index d2ce2ce2a..61cca8649 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -138,7 +138,7 @@ unic-ucd-category = "0.9.0"
 unicode-normalization = "0.1.24"
 walkdir = "2.5.0"
 which = "8.0.0"
-winapi = { version = "0.3", features = ["wincon", "errhandlingapi", "consoleapi"] }
+winapi = { version = "0.3", features = ["wincon"] }
 windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams"] }
 wiremock = "0.6.3"
 xz2 = "0.1.7"
diff --git a/LICENSE b/LICENSE
index 033dc2a0a..456a7cfd6 100644
--- a/LICENSE
+++ b/LICENSE
@@ -6,8 +6,6 @@ The following included source code items use a license other than AGPL3:
 
 In the pylib folder:
 
- * The SuperMemo importer: GPL3 and 0BSD.
- * The Pauker importer: BSD-3.
  * statsbg.py: CC BY 4.0.
 
 In the qt folder:
diff --git a/build/configure/src/aqt.rs b/build/configure/src/aqt.rs
index 35cd626a5..cad07ae4d 100644
--- a/build/configure/src/aqt.rs
+++ b/build/configure/src/aqt.rs
@@ -337,7 +337,12 @@ fn build_wheel(build: &mut Build) -> Result<()> {
             name: "aqt",
             version: anki_version(),
             platform: None,
-            deps: inputs![":qt:aqt", glob!("qt/aqt/**"), "qt/pyproject.toml"],
+            deps: inputs![
+                ":qt:aqt",
+                glob!("qt/aqt/**"),
+                "qt/pyproject.toml",
+                "qt/hatch_build.py"
+            ],
         },
     )
 }
diff --git a/build/configure/src/pylib.rs b/build/configure/src/pylib.rs
index bcef1ecc4..21820ae8b 100644
--- a/build/configure/src/pylib.rs
+++ b/build/configure/src/pylib.rs
@@ -68,7 +68,8 @@ pub fn build_pylib(build: &mut Build) -> Result<()> {
             deps: inputs![
                 ":pylib:anki",
                 glob!("pylib/anki/**"),
-                "pylib/pyproject.toml"
+                "pylib/pyproject.toml",
+                "pylib/hatch_build.py"
             ],
         },
     )?;
diff --git a/build/ninja_gen/src/python.rs b/build/ninja_gen/src/python.rs
index 7ac65e85f..a799bd517 100644
--- a/build/ninja_gen/src/python.rs
+++ b/build/ninja_gen/src/python.rs
@@ -159,6 +159,10 @@ impl BuildAction for PythonEnvironment {
         }
         build.add_output_stamp(format!("{}/.stamp", self.venv_folder));
     }
+
+    fn check_output_timestamps(&self) -> bool {
+        true
+    }
 }
 
 pub struct PythonTypecheck {
diff --git a/build/runner/src/build.rs b/build/runner/src/build.rs
index 5e3042aba..535254736 100644
--- a/build/runner/src/build.rs
+++ b/build/runner/src/build.rs
@@ -67,7 +67,10 @@ pub fn run_build(args: BuildArgs) {
             "MYPY_CACHE_DIR",
             build_root.join("tests").join("mypy").into_string(),
         )
-        .env("PYTHONPYCACHEPREFIX", build_root.join("pycache"))
+        .env(
+            "PYTHONPYCACHEPREFIX",
+            std::path::absolute(build_root.join("pycache")).unwrap(),
+        )
         // commands will not show colors by default, as we do not provide a tty
         .env("FORCE_COLOR", "1")
         .env("MYPY_FORCE_COLOR", "1")
diff --git a/build/runner/src/pyenv.rs b/build/runner/src/pyenv.rs
index 0bd5ec662..d64c8fb3f 100644
--- a/build/runner/src/pyenv.rs
+++ b/build/runner/src/pyenv.rs
@@ -35,7 +35,7 @@ pub fn setup_pyenv(args: PyenvArgs) {
     run_command(
         Command::new(args.uv_bin)
             .env("UV_PROJECT_ENVIRONMENT", args.pyenv_folder.clone())
-            .args(["sync", "--frozen"])
+            .args(["sync", "--locked"])
             .args(args.extra_args),
     );
diff --git a/ftl/core-repo b/ftl/core-repo
index 2f8c9d956..cc56464ab 160000
--- a/ftl/core-repo
+++ b/ftl/core-repo
@@ -1 +1 @@
-Subproject commit 2f8c9d9566aef8b86e3326fe9ff007d594b7ec83
+Subproject commit cc56464ab6354d4f1ad87ab3cc5c071c076b662d
diff --git a/ftl/core/card-templates.ftl b/ftl/core/card-templates.ftl
index 7ecda1968..edb2433f9 100644
--- a/ftl/core/card-templates.ftl
+++ b/ftl/core/card-templates.ftl
@@ -60,7 +60,6 @@ card-templates-this-will-create-card-proceed =
     }
 card-templates-type-boxes-warning = Only one typing box per card template is supported.
 card-templates-restore-to-default = Restore to Default
-card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default
-    values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
+card-templates-restore-to-default-confirmation = This will reset all fields and templates in this note type to their default values, removing any extra fields/templates and their content, and any custom styling. Do you wish to proceed?
 card-templates-restored-to-default = Note type has been restored to its original state.
diff --git a/ftl/core/importing.ftl b/ftl/core/importing.ftl
index 70bc5f4d1..3b9f7c401 100644
--- a/ftl/core/importing.ftl
+++ b/ftl/core/importing.ftl
@@ -65,7 +65,6 @@ importing-with-deck-configs-help =
     If enabled, any deck options that the deck sharer included will also be imported.
     Otherwise, all decks will be assigned the default preset.
 importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip)
-importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
 # the '|' character
 importing-pipe = Pipe
 # Warning displayed when the csv import preview table is clipped (some columns were hidden)
@@ -78,7 +77,6 @@ importing-rows-had-num1d-fields-expected-num2d = '{ $row }' had { $found } field
 importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual.
 importing-semicolon = Semicolon
 importing-skipped = Skipped
-importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
 importing-tab = Tab
 importing-tag-modified-notes = Tag modified notes:
 importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*)
@@ -252,3 +250,5 @@ importing-importing-collection = Importing collection...
 importing-unable-to-import-filename = Unable to import { $filename }: file type not supported
 importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val }
 importing-added = Added
+importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
+importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
diff --git a/ftl/qt-repo b/ftl/qt-repo
index 69f2dbaeb..5f9a9ceb6 160000
--- a/ftl/qt-repo
+++ b/ftl/qt-repo
@@ -1 +1 @@
-Subproject commit 69f2dbaeba6f72ac62da0b35881f320603da5124
+Subproject commit 5f9a9ceb6e8a9aade26c1ad9f1c936f5cc4d9e2a
diff --git a/ftl/qt/qt-accel.ftl b/ftl/qt/qt-accel.ftl
index 327cd6c46..3ab54eb24 100644
--- a/ftl/qt/qt-accel.ftl
+++ b/ftl/qt/qt-accel.ftl
@@ -1,4 +1,5 @@
 qt-accel-about = &About
+qt-accel-about-mac = About Anki...
 qt-accel-cards = &Cards
 qt-accel-check-database = &Check Database
 qt-accel-check-media = Check &Media
@@ -45,3 +46,4 @@ qt-accel-zoom-editor-in = Zoom Editor &In
 qt-accel-zoom-editor-out = Zoom Editor &Out
 qt-accel-create-backup = Create &Backup
 qt-accel-load-backup = &Revert to Backup
+qt-accel-upgrade-downgrade = Upgrade/Downgrade
diff --git a/ftl/qt/qt-misc.ftl b/ftl/qt/qt-misc.ftl
index 60c22ef8b..d7bbef990 100644
--- a/ftl/qt/qt-misc.ftl
+++ b/ftl/qt/qt-misc.ftl
@@ -73,7 +73,7 @@ qt-misc-second =
 qt-misc-layout-auto-enabled = Responsive layout enabled
 qt-misc-layout-vertical-enabled = Vertical layout enabled
 qt-misc-layout-horizontal-enabled = Horizontal layout enabled
-qt-misc-please-restart-to-update-anki = Please restart Anki to update to the latest version.
+qt-misc-open-anki-launcher = Change to a different Anki version?
 
 ## deprecated- these strings will be removed in the future, and do not need
 ## to be translated
diff --git a/proto/anki/scheduler.proto b/proto/anki/scheduler.proto
index 1b7d44a83..5e568aa92 100644
--- a/proto/anki/scheduler.proto
+++ b/proto/anki/scheduler.proto
@@ -402,6 +402,31 @@ message SimulateFsrsReviewRequest {
   repeated float easy_days_percentages = 10;
   deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
   optional uint32 suspend_after_lapse_count = 12;
+  // For CMRR
+  message CMRRTarget {
+    message Memorized {
+      float loss_aversion = 1;
+    };
+
+    message Stability {};
+
+    message FutureMemorized {
+      int32 days = 1;
+    };
+
+    message AverageFutureMemorized {
+      int32 days = 1;
+    };
+
+    oneof kind {
+      Memorized memorized = 1;
+      Stability stability = 2;
+      FutureMemorized future_memorized = 3;
+      AverageFutureMemorized average_future_memorized = 4;
+    };
+  };
+
+  optional CMRRTarget target = 13;
 }
 
 message SimulateFsrsReviewResponse {
diff --git a/pylib/anki/importing/__init__.py b/pylib/anki/importing/__init__.py
index cfc2cac3f..d4fccc643 100644
--- a/pylib/anki/importing/__init__.py
+++ b/pylib/anki/importing/__init__.py
@@ -11,8 +11,6 @@ from anki.importing.apkg import AnkiPackageImporter
 from anki.importing.base import Importer
 from anki.importing.csvfile import TextImporter
 from anki.importing.mnemo import MnemosyneImporter
-from anki.importing.pauker import PaukerImporter
-from anki.importing.supermemo_xml import SupermemoXmlImporter  # type: ignore
 from anki.lang import TR
 
 
@@ -24,8 +22,6 @@ def importers(col: Collection) -> Sequence[tuple[str, type[Importer]]]:
             AnkiPackageImporter,
         ),
         (col.tr.importing_mnemosyne_20_deck_db(), MnemosyneImporter),
-        (col.tr.importing_supermemo_xml_export_xml(), SupermemoXmlImporter),
-        (col.tr.importing_pauker_18_lesson_paugz(), PaukerImporter),
     ]
     anki.hooks.importing_importers(importers)
     return importers
diff --git a/pylib/anki/importing/pauker.py b/pylib/anki/importing/pauker.py
deleted file mode 100644
index ea5c45082..000000000
--- a/pylib/anki/importing/pauker.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright: Andreas Klauer
-# License: BSD-3
-
-# pylint: disable=invalid-name
-
-import gzip
-import html
-import math
-import random
-import time
-import xml.etree.ElementTree as ET
-
-from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter
-from anki.stdmodels import _legacy_add_forward_reverse
-
-ONE_DAY = 60 * 60 * 24
-
-
-class PaukerImporter(NoteImporter):
-    """Import Pauker 1.8 Lesson (*.pau.gz)"""
-
-    needMapper = False
-    allowHTML = True
-
-    def run(self):
-        model = _legacy_add_forward_reverse(self.col)
-        model["name"] = "Pauker"
-        self.col.models.save(model, updateReqs=False)
-        self.col.models.set_current(model)
-        self.model = model
-        self.initMapping()
-        NoteImporter.run(self)
-
-    def fields(self):
-        """Pauker is Front/Back"""
-        return 2
-
-    def foreignNotes(self):
-        """Build and return a list of notes."""
-        notes = []
-
-        try:
-            f = gzip.open(self.file)
-            tree = ET.parse(f)  # type: ignore
-            lesson = tree.getroot()
-            assert lesson.tag == "Lesson"
-        finally:
-            f.close()
-
-        index = -4
-
-        for batch in lesson.findall("./Batch"):
-            index += 1
-
-            for card in batch.findall("./Card"):
-                # Create a note for this card.
-                front = card.findtext("./FrontSide/Text")
-                back = card.findtext("./ReverseSide/Text")
-                note = ForeignNote()
-                assert front and back
-                note.fields = [
-                    html.escape(x.strip())
-                    .replace("\n", "
") - .replace(" ", "  ") - for x in [front, back] - ] - notes.append(note) - - # Determine due date for cards. - frontdue = card.find("./FrontSide[@LearnedTimestamp]") - backdue = card.find("./ReverseSide[@Batch][@LearnedTimestamp]") - - if frontdue is not None: - note.cards[0] = self._learnedCard( - index, int(frontdue.attrib["LearnedTimestamp"]) - ) - - if backdue is not None: - note.cards[1] = self._learnedCard( - int(backdue.attrib["Batch"]), - int(backdue.attrib["LearnedTimestamp"]), - ) - - return notes - - def _learnedCard(self, batch, timestamp): - ivl = math.exp(batch) - now = time.time() - due = ivl - (now - timestamp / 1000.0) / ONE_DAY - fc = ForeignCard() - fc.due = self.col.sched.today + int(due + 0.5) - fc.ivl = random.randint(int(ivl * 0.90), int(ivl + 0.5)) - fc.factor = random.randint(1500, 2500) - return fc diff --git a/pylib/anki/importing/supermemo_xml.py b/pylib/anki/importing/supermemo_xml.py deleted file mode 100644 index 202592c2e..000000000 --- a/pylib/anki/importing/supermemo_xml.py +++ /dev/null @@ -1,484 +0,0 @@ -# Copyright: petr.michalec@gmail.com -# License: GNU GPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -# pytype: disable=attribute-error -# type: ignore -# pylint: disable=C -from __future__ import annotations - -import re -import sys -import time -import unicodedata -from string import capwords -from xml.dom import minidom -from xml.dom.minidom import Element, Text - -from anki.collection import Collection -from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter -from anki.stdmodels import _legacy_add_basic_model - - -class SmartDict(dict): - """ - See http://www.peterbe.com/plog/SmartDict - Copyright 2005, Peter Bengtsson, peter@fry-it.com - 0BSD - - A smart dict can be instantiated either from a pythonic dict - or an instance object (eg. SQL recordsets) but it ensures that you can - do all the convenient lookups such as x.first_name, x['first_name'] or - x.get('first_name'). - """ - - def __init__(self, *a, **kw) -> None: - if a: - if isinstance(type(a[0]), dict): - kw.update(a[0]) - elif isinstance(type(a[0]), object): - kw.update(a[0].__dict__) - elif hasattr(a[0], "__class__") and a[0].__class__.__name__ == "SmartDict": - kw.update(a[0].__dict__) - - dict.__init__(self, **kw) - self.__dict__ = self - - -class SuperMemoElement(SmartDict): - "SmartDict wrapper to store SM Element data" - - def __init__(self, *a, **kw) -> None: - SmartDict.__init__(self, *a, **kw) - # default content - self.__dict__["lTitle"] = None - self.__dict__["Title"] = None - self.__dict__["Question"] = None - self.__dict__["Answer"] = None - self.__dict__["Count"] = None - self.__dict__["Type"] = None - self.__dict__["ID"] = None - self.__dict__["Interval"] = None - self.__dict__["Lapses"] = None - self.__dict__["Repetitions"] = None - self.__dict__["LastRepetiton"] = None - self.__dict__["AFactor"] = None - self.__dict__["UFactor"] = None - - -# This is an AnkiImporter -class SupermemoXmlImporter(NoteImporter): - needMapper = False - allowHTML = True - - """ - Supermemo XML export's to Anki parser. - Goes through a SM collection and fetch all elements. - - My SM collection was a big mess where topics and items were mixed. - I was unable to parse my content in a regular way like for loop on - minidom.getElementsByTagName() etc. My collection had also an - limitation, topics were splited into branches with max 100 items - on each. Learning themes were in deep structure. I wanted to have - full title on each element to be stored in tags. 
- - Code should be upgrade to support importing of SM2006 exports. - """ - - def __init__(self, col: Collection, file: str) -> None: - """Initialize internal variables. - Pameters to be exposed to GUI are stored in self.META""" - NoteImporter.__init__(self, col, file) - m = _legacy_add_basic_model(self.col) - m["name"] = "Supermemo" - self.col.models.save(m) - self.initMapping() - - self.lines = None - self.numFields = int(2) - - # SmXmlParse VARIABLES - self.xmldoc = None - self.pieces = [] - self.cntBuf = [] # to store last parsed data - self.cntElm = [] # to store SM Elements data - self.cntCol = [] # to store SM Colections data - - # store some meta info related to parse algorithm - # SmartDict works like dict / class wrapper - self.cntMeta = SmartDict() - self.cntMeta.popTitles = False - self.cntMeta.title = [] - - # META stores controls of import script, should be - # exposed to import dialog. These are default values. - self.META = SmartDict() - self.META.resetLearningData = False # implemented - self.META.onlyMemorizedItems = False # implemented - self.META.loggerLevel = 2 # implemented 0no,1info,2error,3debug - self.META.tagAllTopics = True - self.META.pathsToBeTagged = [ - "English for beginners", - "Advanced English 97", - "Phrasal Verbs", - ] # path patterns to be tagged - in gui entered like 'Advanced English 97|My Vocablary' - self.META.tagMemorizedItems = True # implemented - self.META.logToStdOutput = False # implemented - - self.notes = [] - - ## TOOLS - - def _fudgeText(self, text: str) -> str: - "Replace sm syntax to Anki syntax" - text = text.replace("\n\r", "
") - text = text.replace("\n", "
") - return text - - def _unicode2ascii(self, str: str) -> str: - "Remove diacritic punctuation from strings (titles)" - return "".join( - [ - c - for c in unicodedata.normalize("NFKD", str) - if not unicodedata.combining(c) - ] - ) - - def _decode_htmlescapes(self, html: str) -> str: - """Unescape HTML code.""" - # In case of bad formatted html you can import MinimalSoup etc.. see BeautifulSoup source code - from bs4 import BeautifulSoup - - # my sm2004 also ecaped & char in escaped sequences. - html = re.sub("&", "&", html) - - # https://anki.tenderapp.com/discussions/ankidesktop/39543-anki-is-replacing-the-character-by-when-i-exit-the-html-edit-mode-ctrlshiftx - if html.find(">") < 0: - return html - - # unescaped solitary chars < or > that were ok for minidom confuse btfl soup - # html = re.sub(u'>',u'>',html) - # html = re.sub(u'<',u'<',html) - - return str(BeautifulSoup(html, "html.parser")) - - def _afactor2efactor(self, af: float) -> float: - # Adapted from - - # Ranges for A-factors and E-factors - af_min = 1.2 - af_max = 6.9 - ef_min = 1.3 - ef_max = 3.3 - - # Sanity checks for the A-factor - if af < af_min: - af = af_min - elif af > af_max: - af = af_max - - # Scale af to the range 0..1 - af_scaled = (af - af_min) / (af_max - af_min) - # Rescale to the interval ef_min..ef_max - ef = ef_min + af_scaled * (ef_max - ef_min) - - return ef - - ## DEFAULT IMPORTER METHODS - - def foreignNotes(self) -> list[ForeignNote]: - # Load file and parse it by minidom - self.loadSource(self.file) - - # Migrating content / time consuming part - # addItemToCards is called for each sm element - self.logger("Parsing started.") - self.parse() - self.logger("Parsing done.") - - # Return imported cards - self.total = len(self.notes) - self.log.append("%d cards imported." 
% self.total) - return self.notes - - def fields(self) -> int: - return 2 - - ## PARSER METHODS - - def addItemToCards(self, item: SuperMemoElement) -> None: - "This method actually do conversion" - - # new anki card - note = ForeignNote() - - # clean Q and A - note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Question))) - note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Answer))) - note.tags = [] - - # pre-process scheduling data - # convert learning data - if ( - not self.META.resetLearningData - and int(item.Interval) >= 1 - and getattr(item, "LastRepetition", None) - ): - # migration of LearningData algorithm - tLastrep = time.mktime(time.strptime(item.LastRepetition, "%d.%m.%Y")) - tToday = time.time() - card = ForeignCard() - card.ivl = int(item.Interval) - card.lapses = int(item.Lapses) - card.reps = int(item.Repetitions) + int(item.Lapses) - nextDue = tLastrep + (float(item.Interval) * 86400.0) - remDays = int((nextDue - time.time()) / 86400) - card.due = self.col.sched.today + remDays - card.factor = int( - self._afactor2efactor(float(item.AFactor.replace(",", "."))) * 1000 - ) - note.cards[0] = card - - # categories & tags - # it's worth to have every theme (tree structure of sm collection) stored in tags, but sometimes not - # you can deceide if you are going to tag all toppics or just that containing some pattern - tTaggTitle = False - for pattern in self.META.pathsToBeTagged: - if ( - item.lTitle is not None - and pattern.lower() in " ".join(item.lTitle).lower() - ): - tTaggTitle = True - break - if tTaggTitle or self.META.tagAllTopics: - # normalize - remove diacritic punctuation from unicode chars to ascii - item.lTitle = [self._unicode2ascii(topic) for topic in item.lTitle] - - # Transform xyz / aaa / bbb / ccc on Title path to Tag xyzAaaBbbCcc - # clean things like [999] or [111-2222] from title path, example: xyz / [1000-1200] zyx / xyz - # clean whitespaces - # set Capital letters for first char of the word - tmp = list( - {re.sub(r"(\[[0-9]+\])", " ", i).replace("_", " ") for i in item.lTitle} - ) - tmp = list({re.sub(r"(\W)", " ", i) for i in tmp}) - tmp = list({re.sub("^[0-9 ]+$", "", i) for i in tmp}) - tmp = list({capwords(i).replace(" ", "") for i in tmp}) - tags = [j[0].lower() + j[1:] for j in tmp if j.strip() != ""] - - note.tags += tags - - if self.META.tagMemorizedItems and int(item.Interval) > 0: - note.tags.append("Memorized") - - self.logger("Element tags\t- " + repr(note.tags), level=3) - - self.notes.append(note) - - def logger(self, text: str, level: int = 1) -> None: - "Wrapper for Anki logger" - - dLevels = {0: "", 1: "Info", 2: "Verbose", 3: "Debug"} - if level <= self.META.loggerLevel: - # self.deck.updateProgress(_(text)) - - if self.META.logToStdOutput: - print( - self.__class__.__name__ - + " - " - + dLevels[level].ljust(9) - + " -\t" - + text - ) - - # OPEN AND LOAD - def openAnything(self, source): - """Open any source / actually only opening of files is used - @return an open handle which must be closed after use, i.e., handle.close()""" - - if source == "-": - return sys.stdin - - # try to open with urllib (if source is http, ftp, or file URL) - import urllib.error - import urllib.parse - import urllib.request - - try: - return urllib.request.urlopen(source) - except OSError: - pass - - # try to open with native open function (if source is pathname) - try: - return open(source, encoding="utf8") - except OSError: - pass - - # treat source as string - import io - - return io.StringIO(str(source)) - - def 
loadSource(self, source: str) -> None: - """Load source file and parse with xml.dom.minidom""" - self.source = source - self.logger("Load started...") - sock = open(self.source, encoding="utf8") - self.xmldoc = minidom.parse(sock).documentElement - sock.close() - self.logger("Load done.") - - # PARSE - def parse(self, node: Text | Element | None = None) -> None: - "Parse method - parses document elements" - - if node is None and self.xmldoc is not None: - node = self.xmldoc - - _method = "parse_%s" % node.__class__.__name__ - if hasattr(self, _method): - parseMethod = getattr(self, _method) - parseMethod(node) - else: - self.logger("No handler for method %s" % _method, level=3) - - def parse_Document(self, node): - "Parse XML document" - - self.parse(node.documentElement) - - def parse_Element(self, node: Element) -> None: - "Parse XML element" - - _method = "do_%s" % node.tagName - if hasattr(self, _method): - handlerMethod = getattr(self, _method) - handlerMethod(node) - else: - self.logger("No handler for method %s" % _method, level=3) - # print traceback.print_exc() - - def parse_Text(self, node: Text) -> None: - "Parse text inside elements. Text is stored into local buffer." - - text = node.data - self.cntBuf.append(text) - - # def parse_Comment(self, node): - # """ - # Source can contain XML comments, but we ignore them - # """ - # pass - - # DO - def do_SuperMemoCollection(self, node: Element) -> None: - "Process SM Collection" - - for child in node.childNodes: - self.parse(child) - - def do_SuperMemoElement(self, node: Element) -> None: - "Process SM Element (Type - Title,Topics)" - - self.logger("=" * 45, level=3) - - self.cntElm.append(SuperMemoElement()) - self.cntElm[-1]["lTitle"] = self.cntMeta["title"] - - # parse all child elements - for child in node.childNodes: - self.parse(child) - - # strip all saved strings, just for sure - for key in list(self.cntElm[-1].keys()): - if hasattr(self.cntElm[-1][key], "strip"): - self.cntElm[-1][key] = self.cntElm[-1][key].strip() - - # pop current element - smel = self.cntElm.pop() - - # Process cntElm if is valid Item (and not an Topic etc..) 
- # if smel.Lapses != None and smel.Interval != None and smel.Question != None and smel.Answer != None: - if smel.Title is None and smel.Question is not None and smel.Answer is not None: - if smel.Answer.strip() != "" and smel.Question.strip() != "": - # migrate only memorized otherway skip/continue - if self.META.onlyMemorizedItems and not (int(smel.Interval) > 0): - self.logger("Element skipped \t- not memorized ...", level=3) - else: - # import sm element data to Anki - self.addItemToCards(smel) - self.logger("Import element \t- " + smel["Question"], level=3) - - # print element - self.logger("-" * 45, level=3) - for key in list(smel.keys()): - self.logger( - "\t{} {}".format((key + ":").ljust(15), smel[key]), level=3 - ) - else: - self.logger("Element skipped \t- no valid Q and A ...", level=3) - - else: - # now we know that item was topic - # parsing of whole node is now finished - - # test if it's really topic - if smel.Title is not None: - # remove topic from title list - t = self.cntMeta["title"].pop() - self.logger("End of topic \t- %s" % (t), level=2) - - def do_Content(self, node: Element) -> None: - "Process SM element Content" - - for child in node.childNodes: - if hasattr(child, "tagName") and child.firstChild is not None: - self.cntElm[-1][child.tagName] = child.firstChild.data - - def do_LearningData(self, node: Element) -> None: - "Process SM element LearningData" - - for child in node.childNodes: - if hasattr(child, "tagName") and child.firstChild is not None: - self.cntElm[-1][child.tagName] = child.firstChild.data - - # It's being processed in do_Content now - # def do_Question(self, node): - # for child in node.childNodes: self.parse(child) - # self.cntElm[-1][node.tagName]=self.cntBuf.pop() - - # It's being processed in do_Content now - # def do_Answer(self, node): - # for child in node.childNodes: self.parse(child) - # self.cntElm[-1][node.tagName]=self.cntBuf.pop() - - def do_Title(self, node: Element) -> None: - "Process SM element Title" - - t = self._decode_htmlescapes(node.firstChild.data) - self.cntElm[-1][node.tagName] = t - self.cntMeta["title"].append(t) - self.cntElm[-1]["lTitle"] = self.cntMeta["title"] - self.logger("Start of topic \t- " + " / ".join(self.cntMeta["title"]), level=2) - - def do_Type(self, node: Element) -> None: - "Process SM element Type" - - if len(self.cntBuf) >= 1: - self.cntElm[-1][node.tagName] = self.cntBuf.pop() - - -# if __name__ == '__main__': - -# for testing you can start it standalone - -# file = u'/home/epcim/hg2g/dev/python/sm2anki/ADVENG2EXP.xxe.esc.zaloha_FINAL.xml' -# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_noOEM.xml' -# file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_oem_1250.xml' -# file = str(sys.argv[1]) -# impo = SupermemoXmlImporter(Deck(),file) -# impo.foreignCards() - -# sys.exit(1) - -# vim: ts=4 sts=2 ft=python diff --git a/pylib/hatch_build.py b/pylib/hatch_build.py index c3539da56..9e8ee9799 100644 --- a/pylib/hatch_build.py +++ b/pylib/hatch_build.py @@ -35,8 +35,16 @@ class CustomBuildHook(BuildHookInterface): assert generated_root.exists(), "you should build with --wheel" for path in generated_root.rglob("*"): - if path.is_file(): + if path.is_file() and not self._should_exclude(path): relative_path = path.relative_to(generated_root) # Place files under anki/ in the distribution dist_path = "anki" / relative_path force_include[str(path)] = str(dist_path) + + def _should_exclude(self, 
path: Path) -> bool: + """Check if a file should be excluded from the wheel.""" + # Exclude __pycache__ + path_str = str(path) + if "/__pycache__/" in path_str: + return True + return False diff --git a/pylib/pyproject.toml b/pylib/pyproject.toml index 555f30c86..23e10077f 100644 --- a/pylib/pyproject.toml +++ b/pylib/pyproject.toml @@ -4,19 +4,15 @@ dynamic = ["version"] requires-python = ">=3.9" license = "AGPL-3.0-or-later" dependencies = [ - "beautifulsoup4", "decorator", "markdown", "orjson", "protobuf>=4.21", "requests[socks]", + # remove after we update to min python 3.11+ "typing_extensions", - "types-protobuf", - "types-requests", - "types-orjson", # platform-specific dependencies "distro; sys_platform != 'darwin' and sys_platform != 'win32'", - "psutil; sys_platform == 'win32'", ] [build-system] diff --git a/pylib/tests/test_importing.py b/pylib/tests/test_importing.py index 191de51f4..b7b63de26 100644 --- a/pylib/tests/test_importing.py +++ b/pylib/tests/test_importing.py @@ -13,7 +13,6 @@ from anki.importing import ( Anki2Importer, AnkiPackageImporter, MnemosyneImporter, - SupermemoXmlImporter, TextImporter, ) from tests.shared import getEmptyCol, getUpgradeDeckPath @@ -306,22 +305,6 @@ def test_csv_tag_only_if_modified(): col.close() -@pytest.mark.filterwarnings("ignore:Using or importing the ABCs") -def test_supermemo_xml_01_unicode(): - col = getEmptyCol() - file = str(os.path.join(testDir, "support", "supermemo1.xml")) - i = SupermemoXmlImporter(col, file) - # i.META.logToStdOutput = True - i.run() - assert i.total == 1 - cid = col.db.scalar("select id from cards") - c = col.get_card(cid) - # Applies A Factor-to-E Factor conversion - assert c.factor == 2879 - assert c.reps == 7 - col.close() - - def test_mnemo(): col = getEmptyCol() file = str(os.path.join(testDir, "support", "mnemo.db")) diff --git a/pyproject.toml b/pyproject.toml index f5443e229..2e47ee2f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,16 @@ dev = [ "colorama", # for isort --color "wheel", "hatchling", # for type checking hatch_build.py files + "mock", + "types-protobuf", + "types-requests", + "types-orjson", + "types-decorator", + "types-flask", + "types-flask-cors", + "types-markdown", + "types-waitress", + "types-pywin32", ] [project.optional-dependencies] diff --git a/qt/aqt/forms/main.ui b/qt/aqt/forms/main.ui index 596ea985c..bffc67ad0 100644 --- a/qt/aqt/forms/main.ui +++ b/qt/aqt/forms/main.ui @@ -46,7 +46,7 @@ 0 0 667 - 24 + 43 @@ -93,6 +93,7 @@ + @@ -130,7 +131,7 @@ Ctrl+P - QAction::PreferencesRole + QAction::MenuRole::PreferencesRole @@ -138,7 +139,7 @@ qt_accel_about - QAction::AboutRole + QAction::MenuRole::ApplicationSpecificRole @@ -283,6 +284,11 @@ qt_accel_load_backup + + + qt_accel_upgrade_downgrade + + diff --git a/qt/aqt/main.py b/qt/aqt/main.py index bc28e287b..b261cd34e 100644 --- a/qt/aqt/main.py +++ b/qt/aqt/main.py @@ -1308,6 +1308,14 @@ title="{}" {}>{}""".format( def onPrefs(self) -> None: aqt.dialogs.open("Preferences", self) + def on_upgrade_downgrade(self) -> None: + if not askUser(tr.qt_misc_open_anki_launcher()): + return + + from aqt.update import update_and_restart + + update_and_restart() + def onNoteTypes(self) -> None: import aqt.models @@ -1389,6 +1397,8 @@ title="{}" {}>{}""".format( ########################################################################## def setupMenus(self) -> None: + from aqt.update import have_launcher + m = self.form # File @@ -1405,6 +1415,7 @@ title="{}" {}>{}""".format( qconnect(m.actionDocumentation.triggered, 
self.onDocumentation) qconnect(m.actionDonate.triggered, self.onDonate) qconnect(m.actionAbout.triggered, self.onAbout) + m.actionAbout.setText(tr.qt_accel_about_mac()) # Edit qconnect(m.actionUndo.triggered, self.undo) @@ -1417,6 +1428,9 @@ title="{}" {}>{}""".format( qconnect(m.actionCreateFiltered.triggered, self.onCram) qconnect(m.actionEmptyCards.triggered, self.onEmptyCards) qconnect(m.actionNoteTypes.triggered, self.onNoteTypes) + qconnect(m.action_upgrade_downgrade.triggered, self.on_upgrade_downgrade) + if not have_launcher(): + m.action_upgrade_downgrade.setVisible(False) qconnect(m.actionPreferences.triggered, self.onPrefs) # View diff --git a/qt/aqt/update.py b/qt/aqt/update.py index d8e92426c..61fec8e6b 100644 --- a/qt/aqt/update.py +++ b/qt/aqt/update.py @@ -1,7 +1,11 @@ # Copyright: Ankitects Pty Ltd and contributors # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +from __future__ import annotations + +import contextlib import os +import subprocess from pathlib import Path import aqt @@ -10,7 +14,7 @@ from anki.collection import CheckForUpdateResponse, Collection from anki.utils import dev_mode, int_time, int_version, is_mac, is_win, plat_desc from aqt.operations import QueryOp from aqt.qt import * -from aqt.utils import show_info, show_warning, showText, tr +from aqt.utils import openLink, show_warning, showText, tr def check_for_update() -> None: @@ -80,22 +84,56 @@ def prompt_to_update(mw: aqt.AnkiQt, ver: str) -> None: # ignore this update mw.pm.meta["suppressUpdate"] = ver elif ret == QMessageBox.StandardButton.Yes: - update_and_restart() + if have_launcher(): + update_and_restart() + else: + openLink(aqt.appWebsiteDownloadSection) + + +def _anki_launcher_path() -> str | None: + return os.getenv("ANKI_LAUNCHER") + + +def have_launcher() -> bool: + return _anki_launcher_path() is not None def update_and_restart() -> None: - """Download and install the update, then restart Anki.""" - update_on_next_run() - # todo: do this automatically in the future - show_info(tr.qt_misc_please_restart_to_update_anki()) + from aqt import mw + + launcher = _anki_launcher_path() + assert launcher + + _trigger_launcher_run() + + with contextlib.suppress(ResourceWarning): + env = os.environ.copy() + creationflags = 0 + if sys.platform == "win32": + creationflags = ( + subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS + ) + subprocess.Popen( + [launcher], + start_new_session=True, + stdin=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + creationflags=creationflags, + ) + + mw.app.quit() -def update_on_next_run() -> None: +def _trigger_launcher_run() -> None: """Bump the mtime on pyproject.toml in the local data directory to trigger an update on next run.""" try: # Get the local data directory equivalent to Rust's dirs::data_local_dir() if is_win: - data_dir = Path(os.environ.get("LOCALAPPDATA", "")) + from .winpaths import get_local_appdata + + data_dir = Path(get_local_appdata()) elif is_mac: data_dir = Path.home() / "Library" / "Application Support" else: # Linux diff --git a/qt/hatch_build.py b/qt/hatch_build.py index fc716a57f..aaf345842 100644 --- a/qt/hatch_build.py +++ b/qt/hatch_build.py @@ -67,11 +67,16 @@ class CustomBuildHook(BuildHookInterface): def _should_exclude(self, path: Path) -> bool: """Check if a file should be excluded from the wheel.""" - # Match the exclusions from write_wheel.py exclude_aqt function + path_str = str(path) + + # Exclude __pycache__ + if "/__pycache__/" in path_str: 
+ return True + if path.suffix in [".ui", ".scss", ".map", ".ts"]: return True if path.name.startswith("tsconfig"): return True - if "/aqt/data" in str(path): + if "/aqt/data" in path_str: return True return False diff --git a/qt/launcher/Cargo.toml b/qt/launcher/Cargo.toml index 45ca11e9b..735cd892e 100644 --- a/qt/launcher/Cargo.toml +++ b/qt/launcher/Cargo.toml @@ -11,6 +11,7 @@ rust-version.workspace = true anki_io.workspace = true anki_process.workspace = true anyhow.workspace = true +camino.workspace = true dirs.workspace = true [target.'cfg(windows)'.dependencies] @@ -22,5 +23,9 @@ libc-stdhandle.workspace = true name = "build_win" path = "src/bin/build_win.rs" +[[bin]] +name = "anki-console" +path = "src/bin/anki_console.rs" + [target.'cfg(windows)'.build-dependencies] embed-resource.workspace = true diff --git a/qt/launcher/src/bin/anki_console.rs b/qt/launcher/src/bin/anki_console.rs new file mode 100644 index 000000000..596377ba1 --- /dev/null +++ b/qt/launcher/src/bin/anki_console.rs @@ -0,0 +1,58 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +#![windows_subsystem = "console"] + +use std::env; +use std::io::stdin; +use std::process::Command; + +use anyhow::Context; +use anyhow::Result; + +fn main() { + if let Err(e) = run() { + eprintln!("Error: {:#}", e); + std::process::exit(1); + } +} + +fn run() -> Result<()> { + let current_exe = env::current_exe().context("Failed to get current executable path")?; + let exe_dir = current_exe + .parent() + .context("Failed to get executable directory")?; + + let anki_exe = exe_dir.join("anki.exe"); + + if !anki_exe.exists() { + anyhow::bail!("anki.exe not found in the same directory"); + } + + // Forward all command line arguments to anki.exe + let args: Vec = env::args().skip(1).collect(); + + let mut cmd = Command::new(&anki_exe); + cmd.args(&args); + + if std::env::var("ANKI_IMPLICIT_CONSOLE").is_err() { + // if directly invoked by the user, signal the launcher that the + // user wants a Python console + std::env::set_var("ANKI_CONSOLE", "1"); + } + + // Wait for the process to complete and forward its exit code + let status = cmd.status().context("Failed to execute anki.exe")?; + if !status.success() { + println!("\nPress enter to close."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + } + + if let Some(code) = status.code() { + std::process::exit(code); + } else { + // Process was terminated by a signal + std::process::exit(1); + } +} diff --git a/qt/launcher/src/bin/build_win.rs b/qt/launcher/src/bin/build_win.rs index ff385d9ea..3ad2c7ce0 100644 --- a/qt/launcher/src/bin/build_win.rs +++ b/qt/launcher/src/bin/build_win.rs @@ -114,6 +114,12 @@ fn copy_files(output_dir: &Path) -> Result<()> { let launcher_dst = output_dir.join("anki.exe"); copy_file(&launcher_src, &launcher_dst)?; + // Copy anki-console binary + let console_src = + PathBuf::from(CARGO_TARGET_DIR).join("x86_64-pc-windows-msvc/release/anki-console.exe"); + let console_dst = output_dir.join("anki-console.exe"); + copy_file(&console_src, &console_dst)?; + // Copy uv.exe and uvw.exe let uv_src = PathBuf::from("../../../out/extracted/uv/uv.exe"); let uv_dst = output_dir.join("uv.exe"); @@ -133,14 +139,12 @@ fn copy_files(output_dir: &Path) -> Result<()> { output_dir.join(".python-version"), )?; - // Copy anki-console.bat - copy_file("anki-console.bat", output_dir.join("anki-console.bat"))?; - Ok(()) } fn sign_binaries(output_dir: &Path) -> Result<()> { 
sign_file(&output_dir.join("anki.exe"))?; + sign_file(&output_dir.join("anki-console.exe"))?; sign_file(&output_dir.join("uv.exe"))?; Ok(()) } diff --git a/qt/launcher/src/main.rs b/qt/launcher/src/main.rs index 2ad3ac00c..b2535f410 100644 --- a/qt/launcher/src/main.rs +++ b/qt/launcher/src/main.rs @@ -16,21 +16,34 @@ use anki_io::modified_time; use anki_io::read_file; use anki_io::remove_file; use anki_io::write_file; +use anki_io::ToUtf8Path; use anki_process::CommandExt; use anyhow::Context; use anyhow::Result; use crate::platform::ensure_terminal_shown; -use crate::platform::exec_anki; -use crate::platform::get_anki_binary_path; use crate::platform::get_exe_and_resources_dirs; use crate::platform::get_uv_binary_name; -use crate::platform::handle_first_launch; -use crate::platform::initial_terminal_setup; -use crate::platform::launch_anki_detached; +use crate::platform::launch_anki_after_update; +use crate::platform::launch_anki_normally; mod platform; +// todo: -c appearing as app name now + +struct State { + has_existing_install: bool, + prerelease_marker: std::path::PathBuf, + uv_install_root: std::path::PathBuf, + uv_path: std::path::PathBuf, + user_pyproject_path: std::path::PathBuf, + user_python_version_path: std::path::PathBuf, + dist_pyproject_path: std::path::PathBuf, + dist_python_version_path: std::path::PathBuf, + uv_lock_path: std::path::PathBuf, + sync_complete_marker: std::path::PathBuf, +} + #[derive(Debug, Clone)] pub enum VersionKind { PyOxidizer(String), @@ -46,16 +59,8 @@ pub enum MainMenuChoice { Quit, } -#[derive(Debug, Clone, Default)] -pub struct Config { - pub show_console: bool, -} - fn main() { if let Err(e) = run() { - let mut config: Config = Config::default(); - initial_terminal_setup(&mut config); - eprintln!("Error: {:#}", e); eprintln!("Press enter to close..."); let mut input = String::new(); @@ -66,58 +71,92 @@ fn main() { } fn run() -> Result<()> { - let mut config: Config = Config::default(); - let uv_install_root = dirs::data_local_dir() .context("Unable to determine data_dir")? 
.join("AnkiProgramFiles"); - let sync_complete_marker = uv_install_root.join(".sync_complete"); - let prerelease_marker = uv_install_root.join("prerelease"); let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?; - let dist_pyproject_path = resources_dir.join("pyproject.toml"); - let user_pyproject_path = uv_install_root.join("pyproject.toml"); - let dist_python_version_path = resources_dir.join(".python-version"); - let user_python_version_path = uv_install_root.join(".python-version"); - let uv_lock_path = uv_install_root.join("uv.lock"); - let uv_path: std::path::PathBuf = exe_dir.join(get_uv_binary_name()); + + let state = State { + has_existing_install: uv_install_root.join(".sync_complete").exists(), + prerelease_marker: uv_install_root.join("prerelease"), + uv_install_root: uv_install_root.clone(), + uv_path: exe_dir.join(get_uv_binary_name()), + user_pyproject_path: uv_install_root.join("pyproject.toml"), + user_python_version_path: uv_install_root.join(".python-version"), + dist_pyproject_path: resources_dir.join("pyproject.toml"), + dist_python_version_path: resources_dir.join(".python-version"), + uv_lock_path: uv_install_root.join("uv.lock"), + sync_complete_marker: uv_install_root.join(".sync_complete"), + }; // Create install directory and copy project files in - create_dir_all(&uv_install_root)?; - let had_user_pyproj = user_pyproject_path.exists(); + create_dir_all(&state.uv_install_root)?; + let had_user_pyproj = state.user_pyproject_path.exists(); if !had_user_pyproj { // during initial launcher testing, enable betas by default - write_file(&prerelease_marker, "")?; + write_file(&state.prerelease_marker, "")?; } - copy_if_newer(&dist_pyproject_path, &user_pyproject_path)?; - copy_if_newer(&dist_python_version_path, &user_python_version_path)?; + copy_if_newer(&state.dist_pyproject_path, &state.user_pyproject_path)?; + copy_if_newer( + &state.dist_python_version_path, + &state.user_python_version_path, + )?; - let pyproject_has_changed = !sync_complete_marker.exists() || { - let pyproject_toml_time = modified_time(&user_pyproject_path)?; - let sync_complete_time = modified_time(&sync_complete_marker)?; + let pyproject_has_changed = !state.sync_complete_marker.exists() || { + let pyproject_toml_time = modified_time(&state.user_pyproject_path)?; + let sync_complete_time = modified_time(&state.sync_complete_marker)?; Ok::(pyproject_toml_time > sync_complete_time) } .unwrap_or(true); if !pyproject_has_changed { - // If venv is already up to date, exec as normal - initial_terminal_setup(&mut config); - let anki_bin = get_anki_binary_path(&uv_install_root); - exec_anki(&anki_bin, &config)?; + // If venv is already up to date, launch Anki normally + let args: Vec = std::env::args().skip(1).collect(); + let cmd = build_python_command(&state.uv_install_root, &args)?; + launch_anki_normally(cmd)?; return Ok(()); } - // we'll need to launch uv; reinvoke ourselves in a terminal so the user can see + // If we weren't in a terminal, respawn ourselves in one ensure_terminal_shown()?; + print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top println!("\x1B[1mAnki Launcher\x1B[0m\n"); - // Check if there's an existing installation before removing marker - let has_existing_install = sync_complete_marker.exists(); + main_menu_loop(&state)?; + // Write marker file to indicate we've completed the sync process + write_sync_marker(&state.sync_complete_marker)?; + + #[cfg(target_os = "macos")] + { + let cmd = build_python_command(&state.uv_install_root, &[])?; + 
platform::mac::prepare_for_launch_after_update(cmd)?; + } + + if cfg!(unix) && !cfg!(target_os = "macos") { + println!("\nPress enter to start Anki."); + let mut input = String::new(); + let _ = stdin().read_line(&mut input); + } else { + // on Windows/macOS, the user needs to close the terminal/console + // currently, but ideas on how we can avoid this would be good! + println!("Anki will start shortly."); + println!("\x1B[1mYou can close this window.\x1B[0m\n"); + } + + let cmd = build_python_command(&state.uv_install_root, &[])?; + launch_anki_after_update(cmd)?; + + Ok(()) +} + +fn main_menu_loop(state: &State) -> Result<()> { loop { - let menu_choice = get_main_menu_choice(has_existing_install, &prerelease_marker); + let menu_choice = + get_main_menu_choice(state.has_existing_install, &state.prerelease_marker); match menu_choice { MainMenuChoice::Quit => std::process::exit(0), @@ -127,40 +166,40 @@ fn run() -> Result<()> { } MainMenuChoice::ToggleBetas => { // Toggle beta prerelease file - if prerelease_marker.exists() { - let _ = remove_file(&prerelease_marker); + if state.prerelease_marker.exists() { + let _ = remove_file(&state.prerelease_marker); println!("Beta releases disabled."); } else { - write_file(&prerelease_marker, "")?; + write_file(&state.prerelease_marker, "")?; println!("Beta releases enabled."); } println!(); continue; } - _ => { + choice @ (MainMenuChoice::Latest | MainMenuChoice::Version(_)) => { // For other choices, update project files and sync update_pyproject_for_version( - menu_choice.clone(), - dist_pyproject_path.clone(), - user_pyproject_path.clone(), - dist_python_version_path.clone(), - user_python_version_path.clone(), + choice, + state.dist_pyproject_path.clone(), + state.user_pyproject_path.clone(), + state.dist_python_version_path.clone(), + state.user_python_version_path.clone(), )?; // Remove sync marker before attempting sync - let _ = remove_file(&sync_complete_marker); + let _ = remove_file(&state.sync_complete_marker); // Sync the venv - let mut command = Command::new(&uv_path); - command.current_dir(&uv_install_root).args([ + let mut command = Command::new(&state.uv_path); + command.current_dir(&state.uv_install_root).args([ "sync", "--upgrade", "--managed-python", ]); // Add python version if .python-version file exists - if user_python_version_path.exists() { - let python_version = read_file(&user_python_version_path)?; + if state.user_python_version_path.exists() { + let python_version = read_file(&state.user_python_version_path)?; let python_version_str = String::from_utf8(python_version) .context("Invalid UTF-8 in .python-version")?; let python_version_trimmed = python_version_str.trim(); @@ -168,7 +207,7 @@ fn run() -> Result<()> { } // Set UV_PRERELEASE=allow if beta mode is enabled - if prerelease_marker.exists() { + if state.prerelease_marker.exists() { command.env("UV_PRERELEASE", "allow"); } @@ -182,7 +221,7 @@ fn run() -> Result<()> { Err(e) => { // If sync fails due to things like a missing wheel on pypi, // we need to remove the lockfile or uv will cache the bad result. 
- let _ = remove_file(&uv_lock_path); + let _ = remove_file(&state.uv_lock_path); println!("Install failed: {:#}", e); println!(); continue; @@ -191,22 +230,6 @@ fn run() -> Result<()> { } } } - - // Write marker file to indicate we've completed the sync process - write_sync_marker(&sync_complete_marker)?; - - // First launch - let anki_bin = get_anki_binary_path(&uv_install_root); - handle_first_launch(&anki_bin)?; - - println!("\nPress enter to start Anki."); - - let mut input = String::new(); - let _ = stdin().read_line(&mut input); - - // Then launch the binary as detached subprocess so the terminal can close - launch_anki_detached(&anki_bin, &config)?; - Ok(()) } @@ -403,3 +426,25 @@ fn parse_version_kind(version: &str) -> Option { Some(VersionKind::Uv(version.to_string())) } } + +fn build_python_command(uv_install_root: &std::path::Path, args: &[String]) -> Result { + let python_exe = if cfg!(target_os = "windows") { + let show_console = std::env::var("ANKI_CONSOLE").is_ok(); + if show_console { + uv_install_root.join(".venv/Scripts/python.exe") + } else { + uv_install_root.join(".venv/Scripts/pythonw.exe") + } + } else { + uv_install_root.join(".venv/bin/python") + }; + + let mut cmd = Command::new(python_exe); + cmd.args(["-c", "import aqt; aqt.run()"]); + cmd.args(args); + // tell the Python code it was invoked by the launcher, and updating is + // available + cmd.env("ANKI_LAUNCHER", std::env::current_exe()?.utf8()?.as_str()); + + Ok(cmd) +} diff --git a/qt/launcher/src/platform/mac.rs b/qt/launcher/src/platform/mac.rs index 2369f538a..ab2c4b8fb 100644 --- a/qt/launcher/src/platform/mac.rs +++ b/qt/launcher/src/platform/mac.rs @@ -1,7 +1,8 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::os::unix::process::CommandExt; +use std::io; +use std::io::Write; use std::process::Command; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; @@ -13,45 +14,7 @@ use anki_process::CommandExt as AnkiCommandExt; use anyhow::Context; use anyhow::Result; -// Re-export Unix functions that macOS uses -pub use super::unix::{ - ensure_terminal_shown, - exec_anki, - get_anki_binary_path, - initial_terminal_setup, -}; - -pub fn launch_anki_detached(anki_bin: &std::path::Path, _config: &crate::Config) -> Result<()> { - use std::process::Stdio; - - let child = Command::new(anki_bin) - .stdin(Stdio::null()) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .process_group(0) - .ensure_spawn()?; - std::mem::forget(child); - - println!("Anki will start shortly."); - println!("\x1B[1mYou can close this window.\x1B[0m\n"); - Ok(()) -} - -pub fn relaunch_in_terminal() -> Result<()> { - let current_exe = std::env::current_exe().context("Failed to get current executable path")?; - Command::new("open") - .args(["-a", "Terminal"]) - .arg(current_exe) - .ensure_spawn()?; - std::process::exit(0); -} - -pub fn handle_first_launch(anki_bin: &std::path::Path) -> Result<()> { - use std::io::Write; - use std::io::{ - self, - }; - +pub fn prepare_for_launch_after_update(mut cmd: Command) -> Result<()> { // Pre-validate by running --version to trigger any Gatekeeper checks print!("\n\x1B[1mThis may take a few minutes. 
Please wait\x1B[0m"); io::stdout().flush().unwrap(); @@ -67,7 +30,7 @@ pub fn handle_first_launch(anki_bin: &std::path::Path) -> Result<()> { } }); - let _ = Command::new(anki_bin) + let _ = cmd .env("ANKI_FIRST_RUN", "1") .arg("--version") .stdout(std::process::Stdio::null()) @@ -81,22 +44,11 @@ pub fn handle_first_launch(anki_bin: &std::path::Path) -> Result<()> { Ok(()) } -pub fn get_exe_and_resources_dirs() -> Result<(std::path::PathBuf, std::path::PathBuf)> { - let exe_dir = std::env::current_exe() - .context("Failed to get current executable path")? - .parent() - .context("Failed to get executable directory")? - .to_owned(); - - let resources_dir = exe_dir - .parent() - .context("Failed to get parent directory")? - .join("Resources"); - - Ok((exe_dir, resources_dir)) -} - -pub fn get_uv_binary_name() -> &'static str { - // macOS uses standard uv binary name - "uv" +pub fn relaunch_in_terminal() -> Result<()> { + let current_exe = std::env::current_exe().context("Failed to get current executable path")?; + Command::new("open") + .args(["-a", "Terminal"]) + .arg(current_exe) + .ensure_spawn()?; + std::process::exit(0); } diff --git a/qt/launcher/src/platform/mod.rs b/qt/launcher/src/platform/mod.rs index bb7208abe..9dc74f8e9 100644 --- a/qt/launcher/src/platform/mod.rs +++ b/qt/launcher/src/platform/mod.rs @@ -1,18 +1,108 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -#[cfg(unix)] +#[cfg(all(unix, not(target_os = "macos")))] mod unix; #[cfg(target_os = "macos")] -mod mac; +pub mod mac; #[cfg(target_os = "windows")] -mod windows; +pub mod windows; -#[cfg(target_os = "macos")] -pub use mac::*; -#[cfg(all(unix, not(target_os = "macos")))] -pub use unix::*; -#[cfg(target_os = "windows")] -pub use windows::*; +use std::path::PathBuf; + +use anki_process::CommandExt; +use anyhow::Context; +use anyhow::Result; + +pub fn get_exe_and_resources_dirs() -> Result<(PathBuf, PathBuf)> { + let exe_dir = std::env::current_exe() + .context("Failed to get current executable path")? + .parent() + .context("Failed to get executable directory")? + .to_owned(); + + let resources_dir = if cfg!(target_os = "macos") { + // On macOS, resources are in ../Resources relative to the executable + exe_dir + .parent() + .context("Failed to get parent directory")? 
+ .join("Resources") + } else { + // On other platforms, resources are in the same directory as executable + exe_dir.clone() + }; + + Ok((exe_dir, resources_dir)) +} + +pub fn get_uv_binary_name() -> &'static str { + if cfg!(target_os = "windows") { + "uv.exe" + } else if cfg!(target_os = "macos") { + "uv" + } else if cfg!(target_arch = "x86_64") { + "uv.amd64" + } else { + "uv.arm64" + } +} + +pub fn launch_anki_after_update(mut cmd: std::process::Command) -> Result<()> { + use std::process::Stdio; + + cmd.stdin(Stdio::null()) + .stdout(Stdio::null()) + .stderr(Stdio::null()); + + #[cfg(windows)] + { + use std::os::windows::process::CommandExt; + const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200; + const DETACHED_PROCESS: u32 = 0x00000008; + cmd.creation_flags(CREATE_NEW_PROCESS_GROUP | DETACHED_PROCESS); + } + + #[cfg(unix)] + { + use std::os::unix::process::CommandExt; + cmd.process_group(0); + } + + let child = cmd.ensure_spawn()?; + std::mem::forget(child); + + Ok(()) +} + +pub fn launch_anki_normally(mut cmd: std::process::Command) -> Result<()> { + #[cfg(windows)] + { + crate::platform::windows::attach_to_parent_console(); + cmd.ensure_success()?; + } + #[cfg(unix)] + cmd.ensure_exec()?; + Ok(()) +} + +#[cfg(windows)] +pub use windows::ensure_terminal_shown; + +#[cfg(unix)] +pub fn ensure_terminal_shown() -> Result<()> { + use std::io::IsTerminal; + + let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout()); + if !stdout_is_terminal { + #[cfg(target_os = "macos")] + mac::relaunch_in_terminal()?; + #[cfg(not(target_os = "macos"))] + unix::relaunch_in_terminal()?; + } + + // Set terminal title to "Anki Launcher" + print!("\x1b]2;Anki Launcher\x07"); + Ok(()) +} diff --git a/qt/launcher/src/platform/unix.rs b/qt/launcher/src/platform/unix.rs index 324bf5aa3..0430bfa96 100644 --- a/qt/launcher/src/platform/unix.rs +++ b/qt/launcher/src/platform/unix.rs @@ -1,36 +1,11 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -#![allow(dead_code)] - -use std::io::IsTerminal; -use std::path::PathBuf; use std::process::Command; -use anki_process::CommandExt as AnkiCommandExt; use anyhow::Context; use anyhow::Result; -use crate::Config; - -pub fn initial_terminal_setup(_config: &mut Config) { - // No special terminal setup needed on Unix -} - -pub fn ensure_terminal_shown() -> Result<()> { - let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout()); - if !stdout_is_terminal { - // If launched from GUI, try to relaunch in a terminal - crate::platform::relaunch_in_terminal()?; - } - - // Set terminal title to "Anki Launcher" - print!("\x1b]2;Anki Launcher\x07"); - - Ok(()) -} - -#[cfg(not(target_os = "macos"))] pub fn relaunch_in_terminal() -> Result<()> { let current_exe = std::env::current_exe().context("Failed to get current executable path")?; @@ -72,52 +47,3 @@ pub fn relaunch_in_terminal() -> Result<()> { // If no terminal worked, continue without relaunching Ok(()) } - -pub fn get_anki_binary_path(uv_install_root: &std::path::Path) -> PathBuf { - uv_install_root.join(".venv/bin/anki") -} - -pub fn launch_anki_detached(anki_bin: &std::path::Path, config: &Config) -> Result<()> { - // On non-macOS Unix systems, we don't need to detach since we never spawned a - // terminal - exec_anki(anki_bin, config) -} - -pub fn handle_first_launch(_anki_bin: &std::path::Path) -> Result<()> { - // No special first launch handling needed for generic Unix systems - Ok(()) -} - -pub fn exec_anki(anki_bin: 
&std::path::Path, _config: &Config) -> Result<()> { - let args: Vec = std::env::args().skip(1).collect(); - Command::new(anki_bin) - .args(args) - .ensure_exec() - .map_err(anyhow::Error::new) -} - -pub fn get_exe_and_resources_dirs() -> Result<(PathBuf, PathBuf)> { - let exe_dir = std::env::current_exe() - .context("Failed to get current executable path")? - .parent() - .context("Failed to get executable directory")? - .to_owned(); - - // On generic Unix systems, assume resources are in the same directory as - // executable - let resources_dir = exe_dir.clone(); - - Ok((exe_dir, resources_dir)) -} - -pub fn get_uv_binary_name() -> &'static str { - // Use architecture-specific uv binary for non-Mac Unix systems - if cfg!(target_arch = "x86_64") { - "uv.amd64" - } else if cfg!(target_arch = "aarch64") { - "uv.arm64" - } else { - // Fallback to generic uv for other architectures - "uv" - } -} diff --git a/qt/launcher/src/platform/windows.rs b/qt/launcher/src/platform/windows.rs index 4e3752d44..0a701c07a 100644 --- a/qt/launcher/src/platform/windows.rs +++ b/qt/launcher/src/platform/windows.rs @@ -1,82 +1,71 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::path::PathBuf; use std::process::Command; -use anki_process::CommandExt; use anyhow::Context; use anyhow::Result; -use winapi::um::consoleapi; -use winapi::um::errhandlingapi; use winapi::um::wincon; -use crate::Config; - pub fn ensure_terminal_shown() -> Result<()> { - ensure_console(); - // // Check if we're already relaunched to prevent infinite recursion - // if std::env::var("ANKI_LAUNCHER_IN_TERMINAL").is_ok() { - // println!("Recurse: Preparing to start Anki...\n"); - // return Ok(()); - // } - - // if have_console { - // } else { - // relaunch_in_cmd()?; - // } - Ok(()) -} - -fn ensure_console() { unsafe { if !wincon::GetConsoleWindow().is_null() { - return; + // We already have a console, no need to spawn anki-console.exe + return Ok(()); } - - if consoleapi::AllocConsole() == 0 { - let error_code = errhandlingapi::GetLastError(); - eprintln!("unexpected AllocConsole error: {}", error_code); - return; - } - - // This black magic triggers Windows to switch to the new - // ANSI-supporting console host, which is usually only available - // when the app is built with the console subsystem. 
- let _ = Command::new("cmd").args(&["/C", ""]).status(); } + + if std::env::var("ANKI_IMPLICIT_CONSOLE").is_ok() && attach_to_parent_console() { + // Successfully attached to parent console + reconnect_stdio_to_console(); + return Ok(()); + } + + // No console available, spawn anki-console.exe and exit + let current_exe = std::env::current_exe().context("Failed to get current executable path")?; + let exe_dir = current_exe + .parent() + .context("Failed to get executable directory")?; + + let console_exe = exe_dir.join("anki-console.exe"); + + if !console_exe.exists() { + anyhow::bail!("anki-console.exe not found in the same directory"); + } + + // Spawn anki-console.exe without waiting + Command::new(&console_exe) + .env("ANKI_IMPLICIT_CONSOLE", "1") + .spawn() + .context("Failed to spawn anki-console.exe")?; + + // Exit immediately after spawning + std::process::exit(0); } -fn attach_to_parent_console() -> bool { +pub fn attach_to_parent_console() -> bool { unsafe { if !wincon::GetConsoleWindow().is_null() { // we have a console already - println!("attach: already had console, false"); return false; } if wincon::AttachConsole(wincon::ATTACH_PARENT_PROCESS) != 0 { // successfully attached to parent - println!("attach: true"); + reconnect_stdio_to_console(); true } else { - println!("attach: false"); false } } } -/// If parent process has a console (eg cmd.exe), redirect our output there. -/// Sets config.show_console to true if successfully attached to console. -pub fn initial_terminal_setup(config: &mut Config) { +/// Reconnect stdin/stdout/stderr to the console. +fn reconnect_stdio_to_console() { use std::ffi::CString; use libc_stdhandle::*; - if !attach_to_parent_console() { - return; - } - // we launched without a console, so we'll need to open stdin/out/err let conin = CString::new("CONIN$").unwrap(); let conout = CString::new("CONOUT$").unwrap(); @@ -89,79 +78,4 @@ libc::freopen(conout.as_ptr(), w.as_ptr(), stdout()); libc::freopen(conout.as_ptr(), w.as_ptr(), stderr()); } - - config.show_console = true; -} - -pub fn get_anki_binary_path(uv_install_root: &std::path::Path) -> std::path::PathBuf { - uv_install_root.join(".venv/Scripts/anki.exe") -} - -fn build_python_command( - anki_bin: &std::path::Path, - args: &[String], - config: &Config, -) -> Result<Command> { - let venv_dir = anki_bin - .parent() - .context("Failed to get venv Scripts directory")?
- .parent() - .context("Failed to get venv directory")?; - - // Use python.exe if show_console is true, otherwise pythonw.exe - let python_exe = if config.show_console { - venv_dir.join("Scripts/python.exe") - } else { - venv_dir.join("Scripts/pythonw.exe") - }; - - let mut cmd = Command::new(python_exe); - cmd.args(["-c", "import aqt; aqt.run()"]); - cmd.args(args); - - Ok(cmd) -} - -pub fn launch_anki_detached(anki_bin: &std::path::Path, config: &Config) -> Result<()> { - use std::os::windows::process::CommandExt; - use std::process::Stdio; - - const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200; - const DETACHED_PROCESS: u32 = 0x00000008; - - let mut cmd = build_python_command(anki_bin, &[], config)?; - cmd.stdin(Stdio::null()) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .creation_flags(CREATE_NEW_PROCESS_GROUP | DETACHED_PROCESS) - .ensure_spawn()?; - Ok(()) -} - -pub fn handle_first_launch(_anki_bin: &std::path::Path) -> Result<()> { - Ok(()) -} - -pub fn exec_anki(anki_bin: &std::path::Path, config: &Config) -> Result<()> { - let args: Vec<String> = std::env::args().skip(1).collect(); - let mut cmd = build_python_command(anki_bin, &args, config)?; - cmd.ensure_success()?; - Ok(()) -} - -pub fn get_exe_and_resources_dirs() -> Result<(PathBuf, PathBuf)> { - let exe_dir = std::env::current_exe() - .context("Failed to get current executable path")? - .parent() - .context("Failed to get executable directory")? - .to_owned(); - - // On Windows, resources dir is the same as exe_dir - let resources_dir = exe_dir.clone(); - - Ok((exe_dir, resources_dir)) -} - -pub fn get_uv_binary_name() -> &'static str { - "uv.exe" } diff --git a/qt/launcher/win/anki-console.bat b/qt/launcher/win/anki-console.bat deleted file mode 100644 index a565fa7b6..000000000 --- a/qt/launcher/win/anki-console.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -"%~dp0"\anki %* -pause - - diff --git a/qt/launcher/win/build.bat b/qt/launcher/win/build.bat index b21831462..da574f210 100644 --- a/qt/launcher/win/build.bat +++ b/qt/launcher/win/build.bat @@ -1,5 +1,10 @@ @echo off -set CODESIGN=1 -REM set NO_COMPRESS=1 +if "%NOCOMP%"=="1" ( + set NO_COMPRESS=1 + set CODESIGN=0 +) else ( + set CODESIGN=1 + set NO_COMPRESS=0 +) cargo run --bin build_win diff --git a/qt/pyproject.toml b/qt/pyproject.toml index ed4b372b0..bd59ff330 100644 --- a/qt/pyproject.toml +++ b/qt/pyproject.toml @@ -11,17 +11,9 @@ dependencies = [ "requests", "send2trash", "waitress>=2.0.0", - "psutil; sys.platform == 'win32'", "pywin32; sys.platform == 'win32'", "anki-mac-helper; sys.platform == 'darwin'", "pip-system-certs!=5.1", - "mock", - "types-decorator", - "types-flask", - "types-flask-cors", - "types-markdown", - "types-waitress", - "types-pywin32", "pyqt6>=6.2", "pyqt6-webengine>=6.2", # anki dependency is added dynamically in hatch_build.py with exact version diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs index d498f7eaf..8ae1518d0 100644 --- a/rslib/src/scheduler/answering/mod.rs +++ b/rslib/src/scheduler/answering/mod.rs @@ -33,6 +33,7 @@ use crate::deckconfig::LeechAction; use crate::decks::Deck; use crate::prelude::*; use crate::scheduler::fsrs::memory_state::fsrs_item_for_memory_state; +use crate::scheduler::fsrs::memory_state::get_decay_from_params; use crate::scheduler::states::PreviewState; use crate::search::SearchNode; @@ -433,7 +434,9 @@ impl Collection { let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?; let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs); let
fsrs_next_states = if fsrs_enabled { - let fsrs = FSRS::new(Some(config.fsrs_params()))?; + let params = config.fsrs_params(); + let fsrs = FSRS::new(Some(params))?; + card.decay = Some(get_decay_from_params(params)); if card.memory_state.is_none() && card.ctype != CardType::New { // Card has been moved or imported into an FSRS deck after params were set, // and will need its initial memory state to be calculated based on review diff --git a/rslib/src/scheduler/fsrs/memory_state.rs b/rslib/src/scheduler/fsrs/memory_state.rs index b592e4da4..425d8da69 100644 --- a/rslib/src/scheduler/fsrs/memory_state.rs +++ b/rslib/src/scheduler/fsrs/memory_state.rs @@ -32,7 +32,7 @@ pub struct ComputeMemoryProgress { /// Helper function to determine the appropriate decay value based on FSRS /// parameters -fn get_decay_from_params(params: &[f32]) -> f32 { +pub(crate) fn get_decay_from_params(params: &[f32]) -> f32 { if params.is_empty() { FSRS6_DEFAULT_DECAY // default decay for FSRS-6 } else if params.len() < 21 { diff --git a/rslib/src/scheduler/fsrs/retention.rs b/rslib/src/scheduler/fsrs/retention.rs index 4c21623bb..29f6b490d 100644 --- a/rslib/src/scheduler/fsrs/retention.rs +++ b/rslib/src/scheduler/fsrs/retention.rs @@ -1,7 +1,9 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::scheduler::simulate_fsrs_review_request::cmrr_target::Kind; use anki_proto::scheduler::SimulateFsrsReviewRequest; use fsrs::extract_simulator_config; +use fsrs::SimulationResult; use fsrs::SimulatorConfig; use fsrs::FSRS; @@ -14,14 +16,115 @@ pub struct ComputeRetentionProgress { pub total: u32, } +pub fn average_r_power_forgetting_curve( + learn_span: usize, + cards: &[fsrs::Card], + offset: f32, + decay: f32, +) -> f32 { + let factor = 0.9_f32.powf(1.0 / decay) - 1.0; + let exp = decay + 1.0; + let den_factor = factor * exp; + + // Closure equivalent to the inner integral function + let integral_calc = |card: &fsrs::Card| -> f32 { + // Performs element-wise: (s / den_factor) * (1.0 + factor * t / s).powf(exp) + let t1 = learn_span as f32 - card.last_date; + let t2 = t1 + offset; + (card.stability / den_factor) * (1.0 + factor * t2 / card.stability).powf(exp) + - (card.stability / den_factor) * (1.0 + factor * t1 / card.stability).powf(exp) + }; + + // Calculate integral difference and divide by time difference element-wise + cards.iter().map(integral_calc).sum::<f32>() / offset +} + impl Collection { pub fn compute_optimal_retention(&mut self, req: SimulateFsrsReviewRequest) -> Result<f32> { + // Helper macro to wrap the closure for "CMRRTargetFn"s + macro_rules! wrap { + ($f:expr) => { + Some(fsrs::CMRRTargetFn(std::sync::Arc::new($f))) + }; + } + + let target_type = req.target.unwrap().kind; + + let days_to_simulate = req.days_to_simulate as f32; + + let target = match target_type { + Some(Kind::Memorized(_)) => None, + Some(Kind::FutureMemorized(settings)) => { + wrap!(move |SimulationResult { + cards, + cost_per_day, + .. + }, + w| { + let total_cost = cost_per_day.iter().sum::<f32>(); + total_cost + / cards.iter().fold(0., |p, c| { + c.retention_on(w, days_to_simulate + settings.days as f32) + p + }) + }) + } + Some(Kind::AverageFutureMemorized(settings)) => { + wrap!(move |SimulationResult { + cards, + cost_per_day, + ..
+ }, + w| { + let total_cost = cost_per_day.iter().sum::<f32>(); + total_cost + / average_r_power_forgetting_curve( + days_to_simulate as usize, + cards, + settings.days as f32, + -w[20], + ) + }) + } + Some(Kind::Stability(_)) => { + wrap!(move |SimulationResult { + cards, + cost_per_day, + .. + }, + w| { + let total_cost = cost_per_day.iter().sum::<f32>(); + total_cost + / cards.iter().fold(0., |p, c| { + p + (c.retention_on(w, days_to_simulate) * c.stability) + }) + }) + } + None => None, + }; + let mut anki_progress = self.new_progress_handler::<ComputeRetentionProgress>(); let fsrs = FSRS::new(None)?; if req.days_to_simulate == 0 { invalid_input!("no days to simulate") } - let (config, cards) = self.simulate_request_to_config(&req)?; + let (mut config, cards) = self.simulate_request_to_config(&req)?; + + if let Some(Kind::Memorized(settings)) = target_type { + let loss_aversion = settings.loss_aversion; + + config.relearning_step_transitions[0][0] *= loss_aversion; + config.relearning_step_transitions[1][0] *= loss_aversion; + config.relearning_step_transitions[2][0] *= loss_aversion; + + config.learning_step_transitions[0][0] *= loss_aversion; + config.learning_step_transitions[1][0] *= loss_aversion; + config.learning_step_transitions[2][0] *= loss_aversion; + + config.state_rating_costs[0][0] *= loss_aversion; + config.state_rating_costs[1][0] *= loss_aversion; + config.state_rating_costs[2][0] *= loss_aversion; + } + Ok(fsrs .optimal_retention( &config, @@ -34,7 +137,7 @@ impl Collection { .is_ok() }, Some(cards), - None, + target, )? .clamp(0.7, 0.95)) } diff --git a/rslib/sync/Cargo.toml b/rslib/sync/Cargo.toml index 7a8f8534a..d23b4f380 100644 --- a/rslib/sync/Cargo.toml +++ b/rslib/sync/Cargo.toml @@ -13,4 +13,9 @@ path = "main.rs" name = "anki-sync-server" [dependencies] + +[target.'cfg(windows)'.dependencies] +anki = { workspace = true, features = ["native-tls"] } + +[target.'cfg(not(windows))'.dependencies] anki = { workspace = true, features = ["rustls"] } diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs index 3a3c06f2c..2650ec648 100644 --- a/tools/minilints/src/main.rs +++ b/tools/minilints/src/main.rs @@ -21,12 +21,7 @@ use walkdir::WalkDir; const NONSTANDARD_HEADER: &[&str] = &[ "./pylib/anki/_vendor/stringcase.py", - "./pylib/anki/importing/pauker.py", - "./pylib/anki/importing/supermemo_xml.py", "./pylib/anki/statsbg.py", - "./pylib/tools/protoc-gen-mypy.py", - "./python/pyqt/install.py", - "./python/write_wheel.py", "./qt/aqt/mpv.py", "./qt/aqt/winpaths.py", ]; diff --git a/tools/update-launcher-env b/tools/update-launcher-env new file mode 100755 index 000000000..c84569f55 --- /dev/null +++ b/tools/update-launcher-env @@ -0,0 +1,15 @@ +#!/bin/bash +# +# Install our latest anki/aqt code into the launcher venv + +set -e + +rm -rf out/wheels +./ninja wheels +if [[ "$OSTYPE" == "darwin"* ]]; then + export VIRTUAL_ENV=$HOME/Library/Application\ Support/AnkiProgramFiles/.venv +else + export VIRTUAL_ENV=$HOME/.local/share/AnkiProgramFiles/.venv +fi +./out/extracted/uv/uv pip install out/wheels/* + diff --git a/tools/update-launcher-env.bat b/tools/update-launcher-env.bat new file mode 100644 index 000000000..9b0b814c6 --- /dev/null +++ b/tools/update-launcher-env.bat @@ -0,0 +1,8 @@ +@echo off +rem +rem Install our latest anki/aqt code into the launcher venv + +rmdir /s /q out\wheels 2>nul +call tools\ninja wheels +set VIRTUAL_ENV=%LOCALAPPDATA%\AnkiProgramFiles\.venv +for %%f in (out\wheels\*.whl) do out\extracted\uv\uv pip install "%%f" \ No newline at end of file diff --git
a/ts/routes/change-notetype/ChangeNotetypePage.svelte b/ts/routes/change-notetype/ChangeNotetypePage.svelte index db25aea2e..e07d088fb 100644 --- a/ts/routes/change-notetype/ChangeNotetypePage.svelte +++ b/ts/routes/change-notetype/ChangeNotetypePage.svelte @@ -3,6 +3,8 @@ Copyright: Ankitects Pty Ltd and contributors License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -->