Merge branch 'main' into learning_queue

commit 1e9fca2ecc
Author: user1823 (committed by GitHub)
Date: 2026-01-06 18:07:50 +05:30
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
37 changed files with 252 additions and 124 deletions

.idea.dist/repo.iml (new file)

@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="PYTHON_MODULE" version="4">
+  <component name="NewModuleRootManager">
+    <content url="file://$MODULE_DIR$">
+      <sourceFolder url="file://$MODULE_DIR$/out/pylib" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/pylib" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/qt" isTestSource="false" />
+      <excludeFolder url="file://$MODULE_DIR$/extra" />
+      <excludeFolder url="file://$MODULE_DIR$/out/pyenv" />
+    </content>
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>


@@ -12,8 +12,7 @@
       "command": "tools/ninja.bat",
       "args": [
         "pylib",
-        "qt",
-        "extract:win_amd64_audio"
+        "qt"
       ]
     }
   }


@@ -189,7 +189,7 @@ Christian Donat <https://github.com/cdonat2>
 Asuka Minato <https://asukaminato.eu.org>
 Dillon Baldwin <https://github.com/DillBal>
 Voczi <https://github.com/voczi>
 Ben Nguyen <105088397+bpnguyen107@users.noreply.github.com>
 Themis Demetriades <themis100@outlook.com>
 Luke Bartholomew <lukesbart@icloud.com>
 Gregory Abrasaldo <degeemon@gmail.com>
@@ -251,6 +251,11 @@ Matbe766 <matildabergstrom01@gmail.com>
 Amanda Sternberg <mandis.sternberg@gmail.com>
 arold0 <arold0@icloud.com>
 nav1s <nav1s@proton.me>
+Ranjit Odedra <ranjitodedra.dev@gmail.com>
+Eltaurus <https://github.com/Eltaurus-Lt>
+jariji
+Francisco Esteva <fr.esteva@duocuc.cl>
+SelfishPig <https://github.com/SelfishPig>
 ********************


@@ -46,10 +46,14 @@ see and install a number of recommended extensions.
 ## PyCharm/IntelliJ

-If you decide to use PyCharm instead of VS Code, there are somethings to be
-aware of.
-
-### Pylib References
-
-You'll need to use File>Project Structure to tell IntelliJ that pylib/ is a
-sources root, so it knows references to 'anki' in aqt are valid.
+### Setting up Python environment
+
+To make PyCharm recognize `anki` and `aqt` imports, you need to add source paths to _Settings > Project Structure_.
+You can copy the provided .idea.dist directory to set up the paths automatically:
+
+```
+mkdir .idea && cd .idea
+ln -sf ../.idea.dist/* .
+```
+
+You also need to add a new Python interpreter under _Settings > Python > Interpreter_ pointing to the Python executable under `out/pyenv` (available after building Anki).


@@ -234,7 +234,7 @@ class DeckBrowser:
             if node.collapsed:
                 prefix = "+"
             else:
-                prefix = "-"
+                prefix = ""

         def indent() -> str:
             return "&nbsp;" * 6 * (node.level - 1)


@@ -14,7 +14,7 @@ from markdown import markdown
 import aqt
 from anki.collection import HelpPage
-from anki.errors import BackendError, Interrupted
+from anki.errors import BackendError, CardTypeError, Interrupted
 from anki.utils import is_win
 from aqt.addons import AddonManager, AddonMeta
 from aqt.qt import *
@@ -36,6 +36,14 @@ def show_exception(*, parent: QWidget, exception: Exception) -> None:
     global _mbox
     error_lines = []
     help_page = HelpPage.TROUBLESHOOTING
+    # default to PlainText
+    text_format = Qt.TextFormat.PlainText
+    # set CardTypeError messages as rich text to allow HTML formatting
+    if isinstance(exception, CardTypeError):
+        text_format = Qt.TextFormat.RichText
     if isinstance(exception, BackendError):
         if exception.context:
             error_lines.append(exception.context)
@@ -51,7 +59,7 @@ def show_exception(*, parent: QWidget, exception: Exception) -> None:
         )
     error_text = "\n".join(error_lines)
     print(error_lines)
-    _mbox = _init_message_box(str(exception), error_text, help_page)
+    _mbox = _init_message_box(str(exception), error_text, help_page, text_format)
     _mbox.show()
@@ -171,7 +179,10 @@ if not os.environ.get("DEBUG"):
 def _init_message_box(
-    user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING
+    user_text: str,
+    debug_text: str,
+    help_page=HelpPage.TROUBLESHOOTING,
+    text_format=Qt.TextFormat.PlainText,
 ):
     global _mbox
@@ -179,7 +190,7 @@ def _init_message_box(
     _mbox.setWindowTitle("Anki")
     _mbox.setText(user_text)
     _mbox.setIcon(QMessageBox.Icon.Warning)
-    _mbox.setTextFormat(Qt.TextFormat.PlainText)
+    _mbox.setTextFormat(text_format)

     def show_help():
         openHelp(help_page)


@@ -85,11 +85,11 @@
       </item>
       <item row="2" column="2">
        <widget class="QSpinBox" name="limit">
-        <property name="maximumSize">
-         <size>
-          <width>60</width>
-          <height>16777215</height>
-         </size>
+        <property name="sizePolicy">
+         <sizepolicy hsizetype="Fixed" vsizetype="Fixed">
+          <horstretch>0</horstretch>
+          <verstretch>0</verstretch>
+         </sizepolicy>
         </property>
         <property name="minimum">
          <number>1</number>
@@ -168,11 +168,11 @@
       </item>
       <item row="1" column="1">
        <widget class="QSpinBox" name="limit_2">
-        <property name="maximumSize">
-         <size>
-          <width>60</width>
-          <height>16777215</height>
-         </size>
+        <property name="sizePolicy">
+         <sizepolicy hsizetype="Fixed" vsizetype="Fixed">
+          <horstretch>0</horstretch>
+          <verstretch>0</verstretch>
+         </sizepolicy>
         </property>
         <property name="minimum">
          <number>1</number>


@@ -47,6 +47,9 @@
        <property name="insertPolicy">
         <enum>QComboBox::NoInsert</enum>
        </property>
+       <property name="sizeAdjustPolicy">
+        <enum>QComboBox::SizeAdjustPolicy::AdjustToMinimumContentsLengthWithIcon</enum>
+       </property>
       </widget>
      </item>
     </layout>


@@ -260,6 +260,7 @@ class Preferences(QDialog):
             self.update_login_status()
             self.confirm_sync_after_login()
+            self.update_network()

         sync_login(self.mw, on_success)

     def sync_logout(self) -> None:


@@ -94,8 +94,15 @@ class TTSPlayer:
                 rank -= 1

-        # if no preferred voices match, we fall back on language
-        # with a rank of -100
+        # if no requested voices match, use a preferred fallback voice
+        # (for example, Apple Samantha) with rank of -50
+        for avail in avail_voices:
+            if avail.lang == tag.lang:
+                if avail.lang == "en_US" and avail.name.startswith("Apple_Samantha"):
+                    return TTSVoiceMatch(voice=avail, rank=-50)
+
+        # if no requested or preferred voices match, we fall back on
+        # the first available voice for the language, with a rank of -100
         for avail in avail_voices:
             if avail.lang == tag.lang:
                 return TTSVoiceMatch(voice=avail, rank=-100)


@@ -809,7 +809,7 @@ def ensureWidgetInScreenBoundaries(widget: QWidget) -> None:
     wsize = widget.size()
     cappedWidth = min(geom.width(), wsize.width())
     cappedHeight = min(geom.height(), wsize.height())
-    if cappedWidth > wsize.width() or cappedHeight > wsize.height():
+    if cappedWidth < wsize.width() or cappedHeight < wsize.height():
         widget.resize(QSize(cappedWidth, cappedHeight))

     # ensure widget is inside top left


@@ -7,4 +7,7 @@ fn main() {
             .manifest_required()
             .unwrap();
     }
+    println!("cargo:rerun-if-changed=../../out/buildhash");
+    let buildhash = std::fs::read_to_string("../../out/buildhash").unwrap_or_default();
+    println!("cargo:rustc-env=BUILDHASH={buildhash}");
 }

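The build-script change above (and the `option_env!` changes further down) follow the same pattern: a value is exported at compile time via `cargo:rustc-env`, and the compiled crate reads it back with `env!`/`option_env!` instead of querying the environment at runtime. A minimal sketch of that pattern, using a made-up `value.txt`/`EMBEDDED_VALUE` pair rather than Anki's actual file and variable names:

```rust
// build.rs (sketch): embed a file's contents into the binary at compile time.
fn main() {
    // Re-run the build script whenever the source file changes.
    println!("cargo:rerun-if-changed=value.txt");
    // A missing file is tolerated; the embedded value is then empty.
    let value = std::fs::read_to_string("value.txt").unwrap_or_default();
    let value = value.trim();
    // Expose it to the crate being compiled as an environment variable.
    println!("cargo:rustc-env=EMBEDDED_VALUE={value}");
}
```

```rust
// src/main.rs (sketch): read the value baked in by the build script.
fn main() {
    // option_env! is evaluated at compile time and yields None if the
    // variable was not set when the crate was built.
    let value = option_env!("EMBEDDED_VALUE").unwrap_or("dev");
    println!("embedded value: {value}");
}
```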

@@ -152,7 +152,9 @@ fn run() -> Result<()> {
     let sync_time = file_timestamp_secs(&state.sync_complete_marker);
     state.pyproject_modified_by_user = pyproject_time > sync_time;
     let pyproject_has_changed = state.pyproject_modified_by_user;
-    if !launcher_requested && !pyproject_has_changed {
+    let different_launcher = diff_launcher_was_installed(&state)?;
+
+    if !launcher_requested && !pyproject_has_changed && !different_launcher {
         // If no launcher request and venv is already up to date, launch Anki normally
         let args: Vec<String> = std::env::args().skip(1).collect();
         let cmd = build_python_command(&state, &args)?;
@@ -325,7 +327,6 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
     }

     command
-        .env("UV_CACHE_DIR", &state.uv_cache_dir)
         .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir)
         .env(
             "UV_HTTP_TIMEOUT",
@@ -344,10 +345,6 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re
         }
     }

-    if state.no_cache_marker.exists() {
-        command.env("UV_NO_CACHE", "1");
-    }
-
     match command.ensure_success() {
         Ok(_) => {
             // Sync succeeded
@@ -603,18 +600,27 @@ fn get_version_kind(state: &State) -> Result<Option<VersionKind>> {
 }

 fn with_only_latest_patch(versions: &[String]) -> Vec<String> {
-    // Only show the latest patch release for a given (major, minor)
+    // Assumes versions are sorted in descending order (newest first)
+    // Only show the latest patch release for a given (major, minor),
+    // and exclude pre-releases if a newer major_minor exists
     let mut seen_major_minor = std::collections::HashSet::new();
     versions
         .iter()
         .filter(|v| {
-            let (major, minor, _, _) = parse_version_for_filtering(v);
+            let (major, minor, _, is_prerelease) = parse_version_for_filtering(v);
             if major == 2 {
                 return true;
             }
             let major_minor = (major, minor);
             if seen_major_minor.contains(&major_minor) {
                 false
+            } else if is_prerelease
+                && seen_major_minor
+                    .iter()
+                    .any(|&(seen_major, seen_minor)| (seen_major, seen_minor) > (major, minor))
+            {
+                // Exclude pre-release if a newer major_minor exists
+                false
             } else {
                 seen_major_minor.insert(major_minor);
                 true
@@ -1013,6 +1019,15 @@ fn uv_command(state: &State) -> Result<Command> {
             .env("UV_DEFAULT_INDEX", &pypi_mirror);
     }

+    if state.no_cache_marker.exists() {
+        command.env("UV_NO_CACHE", "1");
+    } else {
+        command.env("UV_CACHE_DIR", &state.uv_cache_dir);
+    }
+
+    // have uv use the system certstore instead of webpki-roots'
+    command.env("UV_NATIVE_TLS", "1");
+
     Ok(command)
 }
@@ -1107,6 +1122,20 @@ fn show_mirror_submenu(state: &State) -> Result<()> {
     Ok(())
 }

+fn diff_launcher_was_installed(state: &State) -> Result<bool> {
+    let launcher_version = option_env!("BUILDHASH").unwrap_or("dev").trim();
+    let launcher_version_path = state.uv_install_root.join("launcher-version");
+    if let Ok(content) = read_file(&launcher_version_path) {
+        if let Ok(version_str) = String::from_utf8(content) {
+            if version_str.trim() == launcher_version {
+                return Ok(false);
+            }
+        }
+    }
+    write_file(launcher_version_path, launcher_version)?;
+    Ok(true)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

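The `diff_launcher_was_installed` helper amounts to a stamp-file check: compare a build id baked into the binary against a marker file on disk, and refresh the marker when they differ. A standalone sketch of that idea, with an illustrative marker path and a hard-coded build id instead of the launcher's `option_env!("BUILDHASH")`:

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Returns true when the marker file is missing or holds a different build id,
/// and updates the marker so the next run sees the current id.
fn build_changed(marker: &Path, current_build: &str) -> io::Result<bool> {
    if let Ok(previous) = fs::read_to_string(marker) {
        if previous.trim() == current_build {
            return Ok(false);
        }
    }
    fs::write(marker, current_build)?;
    Ok(true)
}

fn main() -> io::Result<()> {
    // In the launcher this would come from the embedded BUILDHASH.
    let changed = build_changed(Path::new("launcher-version"), "abc123")?;
    println!("launcher changed since last run: {changed}");
    Ok(())
}
```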

@@ -8,7 +8,6 @@ mod python;
 mod typescript;
 mod write_strings;

-use std::env;
 use std::path::PathBuf;

 use anki_io::create_dir_all;
@@ -32,8 +31,7 @@ fn main() -> Result<()> {
     python::write_py_interface(&modules)?;

     // write strings.json file to requested path
-    println!("cargo:rerun-if-env-changed=STRINGS_JSON");
-    if let Ok(path) = env::var("STRINGS_JSON") {
+    if let Some(path) = option_env!("STRINGS_JSON") {
         if !path.is_empty() {
             let path = PathBuf::from(path);
             let meta_json = serde_json::to_string_pretty(&modules).unwrap();


@@ -1,7 +1,6 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use std::env;
 use std::fmt::Write;
 use std::path::PathBuf;
@@ -21,7 +20,7 @@ pub fn write_py_interface(modules: &[Module]) -> Result<()> {
     render_methods(modules, &mut out);
     render_legacy_enum(modules, &mut out);

-    if let Ok(path) = env::var("STRINGS_PY") {
+    if let Some(path) = option_env!("STRINGS_PY") {
         let path = PathBuf::from(path);
         create_dir_all(path.parent().unwrap())?;
         write_file_if_changed(path, out)?;


@@ -1,7 +1,6 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-use std::env;
 use std::fmt::Write;
 use std::path::PathBuf;
@@ -22,7 +21,7 @@ pub fn write_ts_interface(modules: &[Module]) -> Result<()> {
     render_module_map(modules, &mut ts_out);
     render_methods(modules, &mut ts_out);

-    if let Ok(path) = env::var("STRINGS_TS") {
+    if let Some(path) = option_env!("STRINGS_TS") {
         let path = PathBuf::from(path);
         create_dir_all(path.parent().unwrap())?;
         write_file_if_changed(path, ts_out)?;


@@ -335,6 +335,15 @@ pub fn write_file_if_changed(path: impl AsRef<Path>, contents: impl AsRef<[u8]>)
             .map(|existing| existing != contents)
             .unwrap_or(true)
     };

+    match std::env::var("CARGO_PKG_NAME") {
+        Ok(pkg) if pkg == "anki_proto" || pkg == "anki_i18n" => {
+            // at comptime for the proto/i18n crates, register implicit output as input
+            println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
+        }
+        _ => {}
+    }
+
     if changed {
         write_file(path, contents)?;
         Ok(true)


@@ -22,7 +22,7 @@ pub(crate) fn write_python_interface(services: &[BackendService]) -> Result<()>
     write_header(&mut out)?;
     for service in services {
-        if service.name == "BackendAnkidroidService" {
+        if ["BackendAnkidroidService", "BackendFrontendService"].contains(&service.name.as_str()) {
             continue;
         }
         for method in service.all_methods() {


@@ -260,7 +260,6 @@ fn reveal_cloze(
             image_occlusion_text,
             question,
             active,
-            cloze_ord,
             &cloze.ordinals,
         ));
         return;
@@ -332,10 +331,9 @@ fn render_image_occlusion(
     text: &str,
     question_side: bool,
     active: bool,
-    ordinal: u16,
     ordinals: &[u16],
 ) -> String {
-    if (question_side && active) || ordinal == 0 {
+    if (question_side && active) || ordinals.contains(&0) {
         format!(
             r#"<div class="cloze" data-ordinal="{}" {}></div>"#,
             ordinals_str(ordinals),


@@ -17,6 +17,7 @@ use crate::import_export::package::media::SafeMediaEntry;
 use crate::import_export::ImportProgress;
 use crate::media::files::add_hash_suffix_to_file_stem;
 use crate::media::files::sha1_of_reader;
+use crate::media::Checksums;
 use crate::prelude::*;
 use crate::progress::ThrottlingProgressHandler;
@@ -75,7 +76,7 @@ impl Context<'_> {
 fn prepare_media(
     media_entries: Vec<SafeMediaEntry>,
     archive: &mut ZipArchive<File>,
-    existing_sha1s: &HashMap<String, Sha1Hash>,
+    existing_sha1s: &Checksums,
     progress: &mut ThrottlingProgressHandler<ImportProgress>,
 ) -> Result<MediaUseMap> {
     let mut media_map = MediaUseMap::default();


@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

+use std::collections::HashSet;
 use std::io::BufRead;
 use std::io::BufReader;
 use std::io::Read;
@@ -106,6 +107,8 @@ struct ColumnContext {
     notetype_column: Option<usize>,
     /// Source column indices for the fields of a notetype
     field_source_columns: FieldSourceColumns,
+    /// Metadata column indices (1-based)
+    meta_columns: HashSet<usize>,
     /// How fields are converted to strings. Used for escaping HTML if
     /// appropriate.
     stringify: fn(&str) -> String,
@@ -119,6 +122,7 @@ impl ColumnContext {
             deck_column: metadata.deck()?.column(),
             notetype_column: metadata.notetype()?.column(),
             field_source_columns: metadata.field_source_columns()?,
+            meta_columns: metadata.meta_columns(),
             stringify: stringify_fn(metadata.is_html),
         })
     }
@@ -166,11 +170,19 @@ impl ColumnContext {
     }

     fn gather_note_fields(&self, record: &csv::StringRecord) -> Vec<Option<String>> {
-        let stringify = self.stringify;
-        self.field_source_columns
-            .iter()
-            .map(|opt| opt.and_then(|idx| record.get(idx - 1)).map(stringify))
-            .collect()
+        let op = |i| record.get(i - 1).map(self.stringify);
+        if !self.field_source_columns.is_empty() {
+            self.field_source_columns
+                .iter()
+                .map(|opt| opt.and_then(op))
+                .collect()
+        } else {
+            // notetype column provided, assume all non-metadata columns are notetype fields
+            (1..=record.len())
+                .filter(|i| !self.meta_columns.contains(i))
+                .map(op)
+                .collect()
+        }
     }
 }

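The new fallback branch in `gather_note_fields` treats every non-metadata column as a note field when the notetype comes from a column in the file. A self-contained sketch of that column-selection logic, using a plain slice of strings in place of Anki's `csv::StringRecord` (the names and the example row are illustrative):

```rust
use std::collections::HashSet;

/// Pick note fields from a CSV row: either the explicitly mapped columns,
/// or every column that is not a metadata column (indices are 1-based).
fn gather_fields<'a>(
    row: &[&'a str],
    mapped_columns: &[Option<usize>],
    meta_columns: &HashSet<usize>,
) -> Vec<Option<&'a str>> {
    let get = |i: usize| row.get(i - 1).copied();
    if !mapped_columns.is_empty() {
        mapped_columns.iter().map(|opt| opt.and_then(get)).collect()
    } else {
        // no explicit mapping: take all non-metadata columns in order
        (1..=row.len()).filter(|i| !meta_columns.contains(i)).map(get).collect()
    }
}

fn main() {
    let row = ["Basic", "front text", "back text", "tag1 tag2"];
    // column 1 holds the notetype and column 4 holds tags; both are metadata
    let meta: HashSet<usize> = [1, 4].into_iter().collect();
    assert_eq!(
        gather_fields(&row, &[], &meta),
        vec![Some("front text"), Some("back text")]
    );
}
```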

@@ -291,11 +291,8 @@ impl CsvMetadataHelpers for CsvMetadata {
                 .map(|&i| (i > 0).then_some(i as usize))
                 .collect(),
             CsvNotetype::NotetypeColumn(_) => {
-                let meta_columns = self.meta_columns();
-                (1..self.column_labels.len() + 1)
-                    .filter(|idx| !meta_columns.contains(idx))
-                    .map(Some)
-                    .collect()
+                // each row's notetype could have varying number of fields
+                vec![]
             }
         })
     }


@@ -173,7 +173,9 @@ pub fn add_data_to_folder_uniquely<'a, P>(
 where
     P: AsRef<Path>,
 {
-    let normalized_name = normalize_filename(desired_name);
+    // force lowercase to account for case-insensitive filesystems
+    // but not within normalize_filename, for existing media refs
+    let normalized_name: Cow<_> = normalize_filename(desired_name).to_lowercase().into();

     let mut target_path = folder.as_ref().join(normalized_name.as_ref());
@@ -496,8 +498,14 @@ mod test {
             "test.mp3"
         );

-        // different contents
+        // different contents, filenames differ only by case
         let h2 = sha1_of_data(b"hello1");
+        assert_eq!(
+            add_data_to_folder_uniquely(dpath, "Test.mp3", b"hello1", h2).unwrap(),
+            "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3"
+        );
+
+        // same contents, filenames differ only by case
         assert_eq!(
             add_data_to_folder_uniquely(dpath, "test.mp3", b"hello1", h2).unwrap(),
             "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3"


@@ -6,7 +6,6 @@ pub mod files;
 mod service;

 use std::borrow::Cow;
-use std::collections::HashMap;
 use std::path::Path;
 use std::path::PathBuf;
@@ -22,6 +21,7 @@ use crate::progress::ThrottlingProgressHandler;
 use crate::sync::http_client::HttpSyncClient;
 use crate::sync::login::SyncAuth;
 use crate::sync::media::database::client::changetracker::ChangeTracker;
+pub use crate::sync::media::database::client::Checksums;
 use crate::sync::media::database::client::MediaDatabase;
 use crate::sync::media::database::client::MediaEntry;
 use crate::sync::media::progress::MediaSyncProgress;
@@ -157,7 +157,7 @@ impl MediaManager {
     pub fn all_checksums_after_checking(
         &self,
         progress: impl FnMut(usize) -> bool,
-    ) -> Result<HashMap<String, Sha1Hash>> {
+    ) -> Result<Checksums> {
         ChangeTracker::new(&self.media_folder, progress).register_changes(&self.db)?;
         self.db.all_registered_checksums()
     }
@@ -176,7 +176,7 @@ impl MediaManager {
     /// All checksums without registering changes first.
     #[cfg(test)]
-    pub(crate) fn all_checksums_as_is(&self) -> HashMap<String, [u8; 20]> {
+    pub(crate) fn all_checksums_as_is(&self) -> Checksums {
         self.db.all_registered_checksums().unwrap()
     }
 }


@@ -443,9 +443,20 @@ impl Collection {
             .storage
             .get_deck(card.deck_id)?
             .or_not_found(card.deck_id)?;
-        let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?;
+        let home_deck = if card.original_deck_id.0 == 0 {
+            &deck
+        } else {
+            &self
+                .storage
+                .get_deck(card.original_deck_id)?
+                .or_not_found(card.original_deck_id)?
+        };
+        let config = self
+            .storage
+            .get_deck_config(home_deck.config_id().or_invalid("home deck is filtered")?)?
+            .unwrap_or_default();

-        let desired_retention = deck.effective_desired_retention(&config);
+        let desired_retention = home_deck.effective_desired_retention(&config);
         let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs);
         let fsrs_next_states = if fsrs_enabled {
             let params = config.fsrs_params();

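In the scheduling hunk above, a card that currently sits in a filtered deck carries its home deck in `original_deck_id`, and an id of zero means the card is already in its home deck. A minimal sketch of that selection with deck ids reduced to plain integers (the helper name is illustrative):

```rust
/// Return the deck whose options should drive scheduling: the card's current
/// deck, unless the card was temporarily moved into a filtered deck.
fn home_deck_id(current_deck_id: i64, original_deck_id: i64) -> i64 {
    if original_deck_id == 0 {
        current_deck_id
    } else {
        original_deck_id
    }
}

fn main() {
    assert_eq!(home_deck_id(5, 0), 5); // normal card
    assert_eq!(home_deck_id(99, 5), 5); // card being studied in a filtered deck
}
```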

@@ -434,9 +434,10 @@ impl SqlWriter<'_> {
             let timing = self.col.timing_today()?;
             (timing.days_elapsed, timing.next_day_at, timing.now)
         };
+        const NEW_TYPE: i8 = CardType::New as i8;
         write!(
             self.sql,
-            "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}, {now}) {op} {r}"
+            "case when c.type = {NEW_TYPE} then false else (extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}, {now}) {op} {r}) end"
         )
         .unwrap()
     }


@@ -54,7 +54,7 @@ fn build_retrievability_query(
 ) -> String {
     if fsrs {
         format!(
-            "extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, {today}, ivl, {next_day_at}, {now}) {order}"
+            "extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, ivl, {today}, {next_day_at}, {now}) {order}"
         )
     } else {
         format!(


@@ -839,7 +839,7 @@ impl fmt::Display for ReviewOrderSubclause {
                 let next_day_at = timing.next_day_at.0;
                 let now = timing.now.0;
                 temp_string =
-                    format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, {today}, ivl, {next_day_at}, {now}) {order}");
+                    format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, ivl, {today}, {next_day_at}, {now}) {order}");
                 &temp_string
             }
             ReviewOrderSubclause::Added => "nid asc, ord asc",


@@ -332,23 +332,30 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> {
             return Ok(None);
         };
         let seconds_elapsed = if let Some(last_review_time) = card_data.last_review_time {
-            now.saturating_sub(last_review_time.0) as u32
+            // This and any following
+            // (x as u32).saturating_sub(y as u32)
+            // must not be changed to
+            // x.saturating_sub(y) as u32
+            // as x and y are i64's and saturating_sub will therfore allow negative numbers
+            // before converting to u32 in the latter example.
+            (now as u32).saturating_sub(last_review_time.0 as u32)
         } else if due > 365_000 {
             // (re)learning card in seconds
             let Ok(ivl) = ctx.get_raw(2).as_i64() else {
                 return Ok(None);
             };
-            let last_review_time = due.saturating_sub(ivl);
-            now.saturating_sub(last_review_time) as u32
+            let last_review_time = (due as u32).saturating_sub(ivl as u32);
+            (now as u32).saturating_sub(last_review_time)
         } else {
             let Ok(ivl) = ctx.get_raw(2).as_i64() else {
                 return Ok(None);
             };
-            let Ok(days_elapsed) = ctx.get_raw(3).as_i64() else {
+            // timing.days_elapsed
+            let Ok(today) = ctx.get_raw(3).as_i64() else {
                 return Ok(None);
             };
-            let review_day = due.saturating_sub(ivl);
-            days_elapsed.saturating_sub(review_day) as u32 * 86_400
+            let review_day = (due as u32).saturating_sub(ivl as u32);
+            (today as u32).saturating_sub(review_day) * 86_400
         };
         let decay = card_data.decay.unwrap_or(FSRS5_DEFAULT_DECAY);
         let retrievability = card_data.memory_state().map(|state| {
@@ -364,7 +371,7 @@
 }

 /// eg. extract_fsrs_relative_retrievability(card.data, card.due,
-/// timing.days_elapsed, card.ivl, timing.next_day_at, timing.now) -> float |
+/// card.ivl, timing.days_elapsed, timing.next_day_at, timing.now) -> float |
 /// null. The higher the number, the higher the card's retrievability relative
 /// to the configured desired retention.
 fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result<()> {
@@ -378,25 +385,32 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result
         let Ok(due) = ctx.get_raw(1).as_i64() else {
             return Ok(None);
         };
-        let Ok(interval) = ctx.get_raw(3).as_i64() else {
+        let Ok(interval) = ctx.get_raw(2).as_i64() else {
             return Ok(None);
         };
+        /*
+        // Unused
         let Ok(next_day_at) = ctx.get_raw(4).as_i64() else {
             return Ok(None);
         };
+        */
         let Ok(now) = ctx.get_raw(5).as_i64() else {
             return Ok(None);
         };
-        let days_elapsed = if due > 365_000 {
-            // (re)learning
-            (next_day_at as u32).saturating_sub(due as u32) / 86_400
+        let secs_elapsed = if due > 365_000 {
+            // (re)learning card with due in seconds
+            // Don't change this to now.subtracting_sub(due) as u32
+            // for the same reasons listed in the comment
+            // in add_extract_fsrs_retrievability
+            (now as u32).saturating_sub(due as u32)
         } else {
-            let Ok(days_elapsed) = ctx.get_raw(2).as_i64() else {
+            // timing.days_elapsed
+            let Ok(today) = ctx.get_raw(3).as_i64() else {
                 return Ok(None);
             };
             let review_day = due.saturating_sub(interval);
-            (days_elapsed as u32).saturating_sub(review_day as u32)
+            (today as u32).saturating_sub(review_day as u32) * 86_400
         };
         if let Ok(card_data) = ctx.get_raw(0).as_str() {
             if !card_data.is_empty() {
@@ -410,23 +424,12 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result
                 let seconds_elapsed =
                     if let Some(last_review_time) = card_data.last_review_time {
-                        now.saturating_sub(last_review_time.0) as u32
-                    } else if due > 365_000 {
-                        // (re)learning card in seconds
-                        let Ok(ivl) = ctx.get_raw(2).as_i64() else {
-                            return Ok(None);
-                        };
-                        let last_review_time = due.saturating_sub(ivl);
-                        now.saturating_sub(last_review_time) as u32
+                        // Don't change this to now.subtracting_sub(due) as u32
+                        // for the same reasons listed in the comment
+                        // in add_extract_fsrs_retrievability
+                        (now as u32).saturating_sub(last_review_time.0 as u32)
                     } else {
-                        let Ok(ivl) = ctx.get_raw(2).as_i64() else {
-                            return Ok(None);
-                        };
-                        let Ok(days_elapsed) = ctx.get_raw(3).as_i64() else {
-                            return Ok(None);
-                        };
-                        let review_day = due.saturating_sub(ivl);
-                        days_elapsed.saturating_sub(review_day) as u32 * 86_400
+                        secs_elapsed
                     };

                 let current_retrievability = FSRS::new(None)
@@ -441,7 +444,7 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result
                 }
             }
         }
+        let days_elapsed = secs_elapsed / 86_400;
         // FSRS data missing; fall back to SM2 ordering
         Ok(Some(
             -((days_elapsed as f32) + 0.001) / (interval as f32).max(1.0),

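The repeated comment about `(x as u32).saturating_sub(y as u32)` versus `x.saturating_sub(y) as u32` is easy to verify in isolation: with i64 inputs, the second form can produce a negative intermediate value that then wraps when cast to u32, while the first form saturates at zero. A small self-contained demonstration (the timestamps are made up):

```rust
fn main() {
    // e.g. a "last review" timestamp that lies slightly in the future
    let now: i64 = 1_700_000_000;
    let last_review: i64 = 1_700_000_100;

    // Casting first, then subtracting: u32 subtraction saturates at 0.
    let safe = (now as u32).saturating_sub(last_review as u32);

    // Subtracting first: the i64 result is -100, and casting that to u32
    // wraps around to a huge number instead of clamping.
    let wrapped = now.saturating_sub(last_review) as u32;

    assert_eq!(safe, 0);
    assert_eq!(wrapped, u32::MAX - 99); // 4_294_967_196
    println!("safe={safe}, wrapped={wrapped}");
}
```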

@@ -18,6 +18,20 @@ use crate::prelude::*;

 pub mod changetracker;

+pub struct Checksums(HashMap<String, Sha1Hash>);
+
+impl Checksums {
+    // case-fold filenames when checking files to be imported
+    // to account for case-insensitive filesystems
+    pub fn get(&self, key: impl AsRef<str>) -> Option<&Sha1Hash> {
+        self.0.get(key.as_ref().to_lowercase().as_str())
+    }
+
+    pub fn contains_key(&self, key: impl AsRef<str>) -> bool {
+        self.get(key).is_some()
+    }
+}
+
 #[derive(Debug, PartialEq, Eq)]
 pub struct MediaEntry {
     pub fname: String,
@@ -175,11 +189,12 @@ delete from media where fname=?",
     }

     /// Returns all filenames and checksums, where the checksum is not null.
-    pub(crate) fn all_registered_checksums(&self) -> error::Result<HashMap<String, Sha1Hash>> {
+    pub(crate) fn all_registered_checksums(&self) -> error::Result<Checksums> {
         self.db
             .prepare("SELECT fname, csum FROM media WHERE csum IS NOT NULL")?
             .query_and_then([], row_to_name_and_checksum)?
-            .collect()
+            .collect::<error::Result<_>>()
+            .map(Checksums)
     }

     pub(crate) fn force_resync(&self) -> error::Result<()> {

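The `Checksums` newtype only changes how lookups are keyed: callers still pass the filename as written in the note, but the map is consulted case-insensitively. A standalone sketch of that wrapper over a plain `HashMap`, using hex strings in place of Anki's `Sha1Hash` type and folding case on insert as well, just to keep the demo self-contained:

```rust
use std::collections::HashMap;

/// A filename -> checksum map whose lookups ignore filename case, mirroring
/// how case-insensitive filesystems treat "Test.mp3" and "test.mp3" as the
/// same file.
struct Checksums(HashMap<String, String>);

impl Checksums {
    fn insert(&mut self, fname: &str, checksum: &str) {
        self.0.insert(fname.to_lowercase(), checksum.to_string());
    }

    fn get(&self, fname: &str) -> Option<&String> {
        self.0.get(fname.to_lowercase().as_str())
    }

    fn contains_key(&self, fname: &str) -> bool {
        self.get(fname).is_some()
    }
}

fn main() {
    let mut sums = Checksums(HashMap::new());
    sums.insert("lesson1.mp3", "d3486ae9136e7856bc42212385ea797094475802");
    // Lookup succeeds regardless of the case used in the media reference.
    assert!(sums.contains_key("Lesson1.MP3"));
    assert!(sums.get("LESSON1.mp3").is_some());
}
```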

@@ -58,7 +58,7 @@ trait DiffTrait {
         if self.get_typed() == self.get_expected() {
             format_typeans!(format!(
                 "<span class=typeGood>{}</span>",
-                self.get_expected_original()
+                htmlescape::encode_minimal(&self.get_expected_original())
             ))
         } else {
             let output = self.to_tokens();
@@ -391,6 +391,15 @@ mod test {
         assert_eq!(ctx, "<code id=typeans>123</code>");
     }

+    #[test]
+    fn correct_input_is_escaped() {
+        let ctx = Diff::new("source <dir>/bin/activate", "source <dir>/bin/activate");
+        assert_eq!(
+            ctx.to_html(),
+            "<code id=typeans><span class=typeGood>source &lt;dir&gt;/bin/activate</span></code>"
+        );
+    }
+
     #[test]
     fn correct_input_is_collapsed() {
         let ctx = Diff::new("123", "123");

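The fix routes the expected answer through HTML escaping before it is wrapped in the `typeGood` span (the diff uses the htmlescape crate's `encode_minimal`). A dependency-free sketch of that kind of minimal escaping, enough to see why `<dir>` no longer vanishes into the DOM:

```rust
/// Escape the handful of characters that change meaning in HTML text content.
fn encode_minimal(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    for ch in input.chars() {
        match ch {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            _ => out.push(ch),
        }
    }
    out
}

fn main() {
    let expected = "source <dir>/bin/activate";
    let html = format!("<span class=typeGood>{}</span>", encode_minimal(expected));
    assert_eq!(
        html,
        "<span class=typeGood>source &lt;dir&gt;/bin/activate</span>"
    );
    println!("{html}");
}
```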

@@ -202,7 +202,7 @@ fn sveltekit_temp_file(path: &str) -> bool {
 }

 fn check_cargo_deny() -> Result<()> {
-    Command::run("cargo install cargo-deny@0.18.3")?;
+    Command::run("cargo install cargo-deny@0.18.6")?;
     Command::run("cargo deny check")?;
     Ok(())
 }


@@ -5,8 +5,6 @@ import os
 import sys

 sys.path.extend(["pylib", "qt", "out/pylib", "out/qt"])
-if sys.platform == "win32":
-    os.environ["PATH"] += ";out\\extracted\\win_amd64_audio"

 import aqt


@@ -4,7 +4,12 @@
 import { getRange, getSelection } from "./cross-browser";

 function wrappedExceptForWhitespace(text: string, front: string, back: string): string {
-    const match = text.match(/^(\s*)([^]*?)(\s*)$/)!;
+    const normalizedText = text
+        .replace(/&nbsp;/g, " ")
+        .replace(/&#160;/g, " ")
+        .replace(/\u00A0/g, " ");
+    const match = normalizedText.match(/^(\s*)([^]*?)(\s*)$/)!;
     return match[1] + front + match[2] + back + match[3];
 }


@@ -10,9 +10,6 @@ export function allImagesLoaded(): Promise<void[]> {
 }

 function imageLoaded(img: HTMLImageElement): Promise<void> {
-    if (!img.getAttribute("decoding")) {
-        img.decoding = "async";
-    }
     return img.complete
         ? Promise.resolve()
         : new Promise((resolve) => {
@@ -31,6 +28,8 @@ function extractImageSrcs(fragment: DocumentFragment): string[] {
 function createImage(src: string): HTMLImageElement {
     const img = new Image();
     img.src = src;
+    img.decoding = "async";
+    img.decode();
     return img;
 }


@@ -37,7 +37,9 @@ export const addOrUpdateNote = async function(
             backExtra,
             tags,
         });
-        showResult(mode.noteId, result, noteCount);
+        if (result.note) {
+            showResult(mode.noteId, result, noteCount);
+        }
     } else {
         const result = await addImageOcclusionNote({
             // IOCloningMode is not used on mobile
@@ -55,23 +57,12 @@

 // show toast message
 const showResult = (noteId: number | null, result: OpChanges, count: number) => {
     const props = $state({
-        message: "",
-        type: "error" as "error" | "success",
+        message: noteId ? tr.browsingCardsUpdated({ count: count }) : tr.importingCardsAdded({ count: count }),
+        type: "success" as "error" | "success",
         showToast: true,
     });

     mount(Toast, {
         target: document.body,
         props,
     });
-
-    if (result.note) {
-        const msg = noteId ? tr.browsingCardsUpdated({ count: count }) : tr.importingCardsAdded({ count: count });
-        props.message = msg;
-        props.type = "success";
-        props.showToast = true;
-    } else {
-        const msg = tr.notetypesErrorGeneratingCloze();
-        props.message = msg;
-        props.showToast = true;
-    }
 };


@@ -6939,8 +6939,8 @@ __metadata:
   linkType: hard

 "vite@npm:6":
-  version: 6.3.6
-  resolution: "vite@npm:6.3.6"
+  version: 6.4.1
+  resolution: "vite@npm:6.4.1"
   dependencies:
     esbuild: "npm:^0.25.0"
     fdir: "npm:^6.4.4"
@@ -6989,7 +6989,7 @@
     optional: true
   bin:
     vite: bin/vite.js
-  checksum: 10c0/add701f1e72596c002275782e38d0389ab400c1be330c93a3009804d62db68097a936ca1c53c3301df3aaacfe5e328eab547060f31ef9c49a277ae50df6ad4fb
+  checksum: 10c0/77bb4c5b10f2a185e7859cc9a81c789021bc18009b02900347d1583b453b58e4b19ff07a5e5a5b522b68fc88728460bb45a63b104d969e8c6a6152aea3b849f7
   languageName: node
   linkType: hard