Merge branch 'main' into fix-system-font-overwridden

This commit is contained in:
GithubAnon0000 2025-07-01 21:19:35 +00:00 committed by GitHub
commit a347423d38
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 1149 additions and 1201 deletions

1
Cargo.lock generated
View file

@ -94,6 +94,7 @@ dependencies = [
"axum", "axum",
"axum-client-ip", "axum-client-ip",
"axum-extra", "axum-extra",
"bitflags 2.9.1",
"blake3", "blake3",
"bytes", "bytes",
"chrono", "chrono",

View file

@ -60,6 +60,7 @@ async-trait = "0.1.88"
axum = { version = "0.8.4", features = ["multipart", "macros"] } axum = { version = "0.8.4", features = ["multipart", "macros"] }
axum-client-ip = "1.1.3" axum-client-ip = "1.1.3"
axum-extra = { version = "0.10.1", features = ["typed-header"] } axum-extra = { version = "0.10.1", features = ["typed-header"] }
bitflags = "2.9.1"
blake3 = "1.8.2" blake3 = "1.8.2"
bytes = "1.10.1" bytes = "1.10.1"
camino = "1.1.10" camino = "1.1.10"

View file

@ -1,4 +1,4 @@
# Anki # Anki®
[![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci) [![Build status](https://badge.buildkite.com/c9edf020a4aec976f9835e54751cc5409d843adbb66d043bd3.svg?branch=main)](https://buildkite.com/ankitects/anki-ci)

View file

@ -35,3 +35,7 @@ path = "src/bin/update_uv.rs"
[[bin]] [[bin]]
name = "update_protoc" name = "update_protoc"
path = "src/bin/update_protoc.rs" path = "src/bin/update_protoc.rs"
[[bin]]
name = "update_node"
path = "src/bin/update_node.rs"

View file

@ -0,0 +1,268 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use std::error::Error;
use std::fs;
use std::path::Path;
use regex::Regex;
use reqwest::blocking::Client;
use serde_json::Value;
/// Metadata for one Node.js release: the published version tag (e.g.
/// "v22.17.0") and the platform archives selected for download.
#[derive(Debug)]
struct NodeRelease {
    // Version string as published on nodejs.org, including the leading "v".
    version: String,
    // One entry per supported platform archive.
    files: Vec<NodeFile>,
}
/// A single downloadable archive belonging to a Node.js release.
#[derive(Debug)]
struct NodeFile {
    // Archive filename, e.g. "node-v22.17.0-linux-x64.tar.xz".
    filename: String,
    // Full download URL on nodejs.org for this archive.
    url: String,
}
/// Entry point: looks up the latest Node.js LTS release, regenerates the
/// `node_archive` function text, and writes it into `src/node.rs`.
fn main() -> Result<(), Box<dyn Error>> {
    let release = fetch_node_release_info()?;
    let function_text = generate_node_archive_function(&release)?;
    update_node_text(&function_text)?;
    println!("Node.js archive function updated successfully!");
    Ok(())
}
/// Queries nodejs.org for the latest LTS release and collects the download
/// URL for every platform archive that Anki's build needs.
///
/// # Errors
/// Fails if the release index cannot be fetched or parsed, if no LTS release
/// is listed, or if any expected platform archive is missing.
fn fetch_node_release_info() -> Result<NodeRelease, Box<dyn Error>> {
    let client = Client::new();

    // Get the Node.js release index.
    let response = client
        .get("https://nodejs.org/dist/index.json")
        .header("User-Agent", "anki-build-updater")
        .send()?;
    let releases: Vec<Value> = response.json()?;

    // Find the latest LTS release. The "lts" field is either `false` or the
    // LTS codename string, so a string value alone identifies an LTS release
    // (the previous extra `!= false` comparison was redundant).
    let latest = releases
        .iter()
        .find(|release| release["lts"].as_str().is_some())
        .ok_or("No LTS releases found")?;

    let version = latest["version"]
        .as_str()
        .ok_or("Version not found")?
        .to_string();

    // The "files" array lists platform keys (e.g. "osx-arm64-tar"), not
    // filenames; we match against those keys below.
    let files = latest["files"]
        .as_array()
        .ok_or("Files array not found")?
        .iter()
        .map(|f| f.as_str().unwrap_or(""))
        .collect::<Vec<_>>();

    let lts_name = latest["lts"].as_str().unwrap_or("unknown");
    println!("Found Node.js LTS version: {version} ({lts_name})");

    // Map platforms to their expected file keys and full filenames.
    let platform_mapping = vec![
        (
            "linux-x64",
            "linux-x64",
            format!("node-{version}-linux-x64.tar.xz"),
        ),
        (
            "linux-arm64",
            "linux-arm64",
            format!("node-{version}-linux-arm64.tar.xz"),
        ),
        (
            "darwin-x64",
            "osx-x64-tar",
            format!("node-{version}-darwin-x64.tar.xz"),
        ),
        (
            "darwin-arm64",
            "osx-arm64-tar",
            format!("node-{version}-darwin-arm64.tar.xz"),
        ),
        (
            "win-x64",
            "win-x64-zip",
            format!("node-{version}-win-x64.zip"),
        ),
        (
            "win-arm64",
            "win-arm64-zip",
            format!("node-{version}-win-arm64.zip"),
        ),
    ];

    let mut node_files = Vec::new();
    for (platform, file_key, filename) in platform_mapping {
        // Check if this file exists in the release.
        if files.contains(&file_key) {
            // Fix: interpolate the concrete filename into the URL and the log
            // messages (these previously contained placeholder text instead).
            let url = format!("https://nodejs.org/dist/{version}/{filename}");
            println!("Found file for {platform}: {filename} (key: {file_key})");
            // Logging before the push lets `filename` move without a clone.
            node_files.push(NodeFile { filename, url });
        } else {
            return Err(
                format!("File not found for {platform} (key: {file_key}): {filename}").into(),
            );
        }
    }

    Ok(NodeRelease {
        version,
        files: node_files,
    })
}
/// Renders the replacement `node_archive` function source from the release
/// info, embedding each platform's download URL and SHA256 checksum.
///
/// # Errors
/// Fails if the checksum file cannot be fetched, a platform archive is
/// missing from `release`, or no checksum line matches an archive filename.
fn generate_node_archive_function(release: &NodeRelease) -> Result<String, Box<dyn Error>> {
    let client = Client::new();

    // Download the release's checksum list once up front.
    println!("Fetching SHA256 checksums...");
    let shasums_url = format!("https://nodejs.org/dist/{}/SHASUMS256.txt", release.version);
    let shasums_text = client
        .get(&shasums_url)
        .header("User-Agent", "anki-build-updater")
        .send()?
        .text()?;

    // Filename suffix -> Platform enum variant name; the suffixes match the
    // exact filenames stored by fetch_node_release_info.
    let platform_mapping = vec![
        ("linux-x64.tar.xz", "LinuxX64"),
        ("linux-arm64.tar.xz", "LinuxArm"),
        ("darwin-x64.tar.xz", "MacX64"),
        ("darwin-arm64.tar.xz", "MacArm"),
        ("win-x64.zip", "WindowsX64"),
        ("win-arm64.zip", "WindowsArm"),
    ];

    let mut platform_blocks = Vec::new();
    for (file_pattern, platform_name) in platform_mapping {
        // Locate the archive whose filename carries this platform's suffix.
        let file = release
            .files
            .iter()
            .find(|f| f.filename.ends_with(file_pattern))
            .ok_or_else(|| {
                format!(
                    "File not found for platform {platform_name}: no file ending with {file_pattern}"
                )
            })?;

        // The checksum file is "<sha256>  <filename>" per line; take the
        // first whitespace-separated token of the matching line.
        let sha256 = shasums_text
            .lines()
            .find(|line| line.contains(&file.filename))
            .and_then(|line| line.split_whitespace().next())
            .ok_or_else(|| format!("SHA256 not found for {}", file.filename))?;
        println!(
            "Found SHA256 for {}: {} => {}",
            platform_name, file.filename, sha256
        );

        platform_blocks.push(format!(
            "        Platform::{} => OnlineArchive {{\n            url: \"{}\",\n            sha256: \"{}\",\n        }},",
            platform_name, file.url, sha256
        ));
    }

    Ok(format!(
        "pub fn node_archive(platform: Platform) -> OnlineArchive {{\n    match platform {{\n{}\n    }}\n}}",
        platform_blocks.join("\n")
    ))
}
/// Swaps the existing `node_archive` function in src/node.rs for
/// `new_function`, locating the old definition with a multiline regex.
fn update_node_text(new_function: &str) -> Result<(), Box<dyn Error>> {
    let current = read_node_rs()?;
    // (?s) makes `.` span newlines so the lazy `.*?` can cover the whole
    // function body up to the two closing braces.
    let pattern =
        r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}";
    let re = Regex::new(pattern)?;
    let replaced = re.replace(&current, new_function);
    write_node_rs(&replaced)
}
/// Reads src/node.rs, resolved relative to this crate's manifest directory.
fn read_node_rs() -> Result<String, Box<dyn Error>> {
    // CARGO_MANIFEST_DIR points at the crate root; node.rs lives under src/.
    let root = std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let node_rs = Path::new(&root).join("src").join("node.rs");
    let contents = fs::read_to_string(node_rs)?;
    Ok(contents)
}
/// Overwrites src/node.rs (resolved relative to this crate's manifest
/// directory) with `content`.
fn write_node_rs(content: &str) -> Result<(), Box<dyn Error>> {
    // Same path resolution as read_node_rs: crate root from the environment.
    let root = std::env::var("CARGO_MANIFEST_DIR").map_err(|_| "CARGO_MANIFEST_DIR not set")?;
    let node_rs = Path::new(&root).join("src").join("node.rs");
    fs::write(node_rs, content)?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Verifies that the regex used by update_node_text matches exactly the
    /// old node_archive function — replacing it while leaving surrounding
    /// code untouched.
    #[test]
    fn test_regex_replacement() {
        // Fixture mimicking src/node.rs: the target function flanked by
        // unrelated code that must survive the replacement.
        let sample_content = r#"Some other code
pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz",
            sha256: "old_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz",
            sha256: "old_hash",
        },
    }
}
More code here"#;

        // Replacement text in the shape generate_node_archive_function emits.
        let new_function = r#"pub fn node_archive(platform: Platform) -> OnlineArchive {
    match platform {
        Platform::LinuxX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-linux-x64.tar.xz",
            sha256: "new_hash",
        },
        Platform::MacX64 => OnlineArchive {
            url: "https://nodejs.org/dist/v21.0.0/node-v21.0.0-darwin-x64.tar.xz",
            sha256: "new_hash",
        },
    }
}"#;

        // Same pattern as update_node_text uses against the real file.
        let re = Regex::new(
            r"(?s)pub fn node_archive\(platform: Platform\) -> OnlineArchive \{.*?\n\s*\}\s*\n\s*\}"
        ).unwrap();
        let result = re.replace(sample_content, new_function);

        // New content is in, old content is gone, neighbors are intact.
        assert!(result.contains("v21.0.0"));
        assert!(result.contains("new_hash"));
        assert!(!result.contains("old_hash"));
        assert!(result.contains("Some other code"));
        assert!(result.contains("More code here"));
    }
}

View file

@ -19,28 +19,28 @@ use crate::input::BuildInput;
pub fn node_archive(platform: Platform) -> OnlineArchive { pub fn node_archive(platform: Platform) -> OnlineArchive {
match platform { match platform {
Platform::LinuxX64 => OnlineArchive { Platform::LinuxX64 => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-x64.tar.xz", url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-x64.tar.xz",
sha256: "822780369d0ea309e7d218e41debbd1a03f8cdf354ebf8a4420e89f39cc2e612", sha256: "325c0f1261e0c61bcae369a1274028e9cfb7ab7949c05512c5b1e630f7e80e12",
}, },
Platform::LinuxArm => OnlineArchive { Platform::LinuxArm => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-linux-arm64.tar.xz", url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-linux-arm64.tar.xz",
sha256: "f6df68c6793244071f69023a9b43a0cf0b13d65cbe86d55925c28e4134d9aafb", sha256: "140aee84be6774f5fb3f404be72adbe8420b523f824de82daeb5ab218dab7b18",
}, },
Platform::MacX64 => OnlineArchive { Platform::MacX64 => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-x64.tar.xz", url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-x64.tar.xz",
sha256: "d4b4ab81ebf1f7aab09714f834992f27270ad0079600da00c8110f8950ca6c5a", sha256: "f79de1f64df4ac68493a344bb5ab7d289d0275271e87b543d1278392c9de778a",
}, },
Platform::MacArm => OnlineArchive { Platform::MacArm => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-darwin-arm64.tar.xz", url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-darwin-arm64.tar.xz",
sha256: "f18a7438723d48417f5e9be211a2f3c0520ffbf8e02703469e5153137ca0f328", sha256: "cc9cc294eaf782dd93c8c51f460da610cc35753c6a9947411731524d16e97914",
}, },
Platform::WindowsX64 => OnlineArchive { Platform::WindowsX64 => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-x64.zip", url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-x64.zip",
sha256: "893115cd92ad27bf178802f15247115e93c0ef0c753b93dca96439240d64feb5", sha256: "721ab118a3aac8584348b132767eadf51379e0616f0db802cc1e66d7f0d98f85",
}, },
Platform::WindowsArm => OnlineArchive { Platform::WindowsArm => OnlineArchive {
url: "https://nodejs.org/dist/v20.11.0/node-v20.11.0-win-arm64.zip", url: "https://nodejs.org/dist/v22.17.0/node-v22.17.0-win-arm64.zip",
sha256: "89c1f7034dcd6ff5c17f2af61232a96162a1902f862078347dcf274a938b6142", sha256: "78355dc9ca117bb71d3f081e4b1b281855e2b134f3939bb0ca314f7567b0e621",
}, },
} }
} }

View file

@ -426,7 +426,7 @@ deck-config-desired-retention-tooltip =
values will greatly increase your workload, and lower values can be demoralizing when you forget values will greatly increase your workload, and lower values can be demoralizing when you forget
a lot of material. a lot of material.
deck-config-desired-retention-tooltip2 = deck-config-desired-retention-tooltip2 =
The workload values provided by the tooltip are a rough approximation. For a greater level of accuracy, use the simulator. The workload values provided by the info box are a rough approximation. For a greater level of accuracy, use the simulator.
deck-config-historical-retention-tooltip = deck-config-historical-retention-tooltip =
When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will When some of your review history is missing, FSRS needs to fill in the gaps. By default, it will
assume that when you did those old reviews, you remembered 90% of the material. If your old retention assume that when you did those old reviews, you remembered 90% of the material. If your old retention

View file

@ -19,8 +19,8 @@
"@poppanator/sveltekit-svg": "^5.0.0", "@poppanator/sveltekit-svg": "^5.0.0",
"@sqltools/formatter": "^1.2.2", "@sqltools/formatter": "^1.2.2",
"@sveltejs/adapter-static": "^3.0.0", "@sveltejs/adapter-static": "^3.0.0",
"@sveltejs/kit": "^2.20.7", "@sveltejs/kit": "^2.22.2",
"@sveltejs/vite-plugin-svelte": "4.0.0", "@sveltejs/vite-plugin-svelte": "5.1",
"@types/bootstrap": "^5.0.12", "@types/bootstrap": "^5.0.12",
"@types/codemirror": "^5.60.0", "@types/codemirror": "^5.60.0",
"@types/d3": "^7.0.0", "@types/d3": "^7.0.0",
@ -30,7 +30,7 @@
"@types/jqueryui": "^1.12.13", "@types/jqueryui": "^1.12.13",
"@types/lodash-es": "^4.17.4", "@types/lodash-es": "^4.17.4",
"@types/marked": "^5.0.0", "@types/marked": "^5.0.0",
"@types/node": "^20", "@types/node": "^22",
"@typescript-eslint/eslint-plugin": "^5.60.1", "@typescript-eslint/eslint-plugin": "^5.60.1",
"@typescript-eslint/parser": "^5.60.1", "@typescript-eslint/parser": "^5.60.1",
"caniuse-lite": "^1.0.30001431", "caniuse-lite": "^1.0.30001431",
@ -48,16 +48,16 @@
"prettier": "^3.4.2", "prettier": "^3.4.2",
"prettier-plugin-svelte": "^3.3.2", "prettier-plugin-svelte": "^3.3.2",
"sass": "<1.77", "sass": "<1.77",
"svelte": "^5.17.3", "svelte": "^5.34.9",
"svelte-check": "^3.4.4", "svelte-check": "^4.2.2",
"svelte-preprocess": "^5.0.4", "svelte-preprocess": "^6.0.3",
"svelte-preprocess-esbuild": "^3.0.1", "svelte-preprocess-esbuild": "^3.0.1",
"svgo": "^3.2.0", "svgo": "^3.2.0",
"tslib": "^2.0.3", "tslib": "^2.0.3",
"tsx": "^3.12.0", "tsx": "^4.8.1",
"typescript": "^5.0.4", "typescript": "^5.0.4",
"vite": "5.4.19", "vite": "6",
"vitest": "^2" "vitest": "^3"
}, },
"dependencies": { "dependencies": {
"@bufbuild/protobuf": "^1.2.1", "@bufbuild/protobuf": "^1.2.1",
@ -81,7 +81,8 @@
}, },
"resolutions": { "resolutions": {
"canvas": "npm:empty-npm-package@1.0.0", "canvas": "npm:empty-npm-package@1.0.0",
"cookie": "0.7.0" "cookie": "0.7.0",
"vite": "6"
}, },
"browserslist": [ "browserslist": [
"defaults", "defaults",

View file

@ -56,6 +56,8 @@ service SchedulerService {
rpc SimulateFsrsReview(SimulateFsrsReviewRequest) rpc SimulateFsrsReview(SimulateFsrsReviewRequest)
returns (SimulateFsrsReviewResponse); returns (SimulateFsrsReviewResponse);
rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse); rpc EvaluateParams(EvaluateParamsRequest) returns (EvaluateParamsResponse);
rpc EvaluateParamsLegacy(EvaluateParamsLegacyRequest)
returns (EvaluateParamsResponse);
rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse); rpc ComputeMemoryState(cards.CardId) returns (ComputeMemoryStateResponse);
// The number of days the calculated interval was fuzzed by on the previous // The number of days the calculated interval was fuzzed by on the previous
// review (if any). Utilized by the FSRS add-on. // review (if any). Utilized by the FSRS add-on.
@ -402,31 +404,6 @@ message SimulateFsrsReviewRequest {
repeated float easy_days_percentages = 10; repeated float easy_days_percentages = 10;
deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11; deck_config.DeckConfig.Config.ReviewCardOrder review_order = 11;
optional uint32 suspend_after_lapse_count = 12; optional uint32 suspend_after_lapse_count = 12;
// For CMRR
message CMRRTarget {
message Memorized {
float loss_aversion = 1;
};
message Stability {};
message FutureMemorized {
int32 days = 1;
};
message AverageFutureMemorized {
int32 days = 1;
};
oneof kind {
Memorized memorized = 1;
Stability stability = 2;
FutureMemorized future_memorized = 3;
AverageFutureMemorized average_future_memorized = 4;
};
};
optional CMRRTarget target = 13;
} }
message SimulateFsrsReviewResponse { message SimulateFsrsReviewResponse {
@ -467,6 +444,12 @@ message EvaluateParamsRequest {
uint32 num_of_relearning_steps = 3; uint32 num_of_relearning_steps = 3;
} }
message EvaluateParamsLegacyRequest {
repeated float params = 1;
string search = 2;
int64 ignore_revlogs_before_ms = 3;
}
message EvaluateParamsResponse { message EvaluateParamsResponse {
float log_loss = 1; float log_loss = 1;
float rmse_bins = 2; float rmse_bins = 2;

View file

@ -66,7 +66,8 @@ def show(mw: aqt.AnkiQt) -> QDialog:
# WebView contents # WebView contents
###################################################################### ######################################################################
abouttext = "<center><img src='/_anki/imgs/anki-logo-thin.png'></center>" abouttext = "<center><img src='/_anki/imgs/anki-logo-thin.png'></center>"
abouttext += f"<p>{tr.about_anki_is_a_friendly_intelligent_spaced()}" lede = tr.about_anki_is_a_friendly_intelligent_spaced().replace("Anki", "Anki®")
abouttext += f"<p>{lede}"
abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}" abouttext += f"<p>{tr.about_anki_is_licensed_under_the_agpl3()}"
abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>" abouttext += f"<p>{tr.about_version(val=version_with_build())}<br>"
abouttext += ("Python %s Qt %s PyQt %s<br>") % ( abouttext += ("Python %s Qt %s PyQt %s<br>") % (

View file

@ -23,25 +23,36 @@ from aqt.utils import openHelp, showWarning, supportText, tooltip, tr
if TYPE_CHECKING: if TYPE_CHECKING:
from aqt.main import AnkiQt from aqt.main import AnkiQt
# so we can be non-modal/non-blocking, without Python deallocating the message
# box ahead of time
_mbox: QMessageBox | None = None
def show_exception(*, parent: QWidget, exception: Exception) -> None: def show_exception(*, parent: QWidget, exception: Exception) -> None:
"Present a caught exception to the user using a pop-up." "Present a caught exception to the user using a pop-up."
if isinstance(exception, Interrupted): if isinstance(exception, Interrupted):
# nothing to do # nothing to do
return return
global _mbox
error_lines = []
help_page = HelpPage.TROUBLESHOOTING
if isinstance(exception, BackendError): if isinstance(exception, BackendError):
if exception.context: if exception.context:
print(exception.context) error_lines.append(exception.context)
if exception.backtrace: if exception.backtrace:
print(exception.backtrace) error_lines.append(exception.backtrace)
showWarning(str(exception), parent=parent, help=exception.help_page) if exception.help_page is not None:
help_page = exception.help_page
else: else:
# if the error is not originating from the backend, dump # if the error is not originating from the backend, dump
# a traceback to the console to aid in debugging # a traceback to the console to aid in debugging
traceback.print_exception( error_lines = traceback.format_exception(
None, exception, exception.__traceback__, file=sys.stdout None, exception, exception.__traceback__
) )
showWarning(str(exception), parent=parent) error_text = "\n".join(error_lines)
print(error_lines)
_mbox = _init_message_box(str(exception), error_text, help_page)
_mbox.show()
def is_chromium_cert_error(error: str) -> bool: def is_chromium_cert_error(error: str) -> bool:
@ -158,9 +169,39 @@ if not os.environ.get("DEBUG"):
sys.excepthook = excepthook sys.excepthook = excepthook
# so we can be non-modal/non-blocking, without Python deallocating the message
# box ahead of time def _init_message_box(
_mbox: QMessageBox | None = None user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING
):
global _mbox
_mbox = QMessageBox()
_mbox.setWindowTitle("Anki")
_mbox.setText(user_text)
_mbox.setIcon(QMessageBox.Icon.Warning)
_mbox.setTextFormat(Qt.TextFormat.PlainText)
def show_help():
openHelp(help_page)
def copy_debug_info():
QApplication.clipboard().setText(debug_text)
tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)
help = _mbox.addButton(QMessageBox.StandardButton.Help)
if debug_text:
debug_info = _mbox.addButton(
tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
)
debug_info.disconnect()
debug_info.clicked.connect(copy_debug_info)
cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
cancel.setText(tr.actions_close())
help.disconnect()
help.clicked.connect(show_help)
return _mbox
class ErrorHandler(QObject): class ErrorHandler(QObject):
@ -252,33 +293,7 @@ class ErrorHandler(QObject):
user_text += "\n\n" + self._addonText(error) user_text += "\n\n" + self._addonText(error)
debug_text += addon_debug_info() debug_text += addon_debug_info()
def show_troubleshooting(): _mbox = _init_message_box(user_text, debug_text)
openHelp(HelpPage.TROUBLESHOOTING)
def copy_debug_info():
QApplication.clipboard().setText(debug_text)
tooltip(tr.errors_copied_to_clipboard(), parent=_mbox)
global _mbox
_mbox = QMessageBox()
_mbox.setWindowTitle("Anki")
_mbox.setText(user_text)
_mbox.setIcon(QMessageBox.Icon.Warning)
_mbox.setTextFormat(Qt.TextFormat.PlainText)
troubleshooting = _mbox.addButton(
tr.errors_troubleshooting_button(), QMessageBox.ButtonRole.ActionRole
)
debug_info = _mbox.addButton(
tr.errors_copy_debug_info_button(), QMessageBox.ButtonRole.ActionRole
)
cancel = _mbox.addButton(QMessageBox.StandardButton.Cancel)
cancel.setText(tr.actions_close())
troubleshooting.disconnect()
troubleshooting.clicked.connect(show_troubleshooting)
debug_info.disconnect()
debug_info.clicked.connect(copy_debug_info)
if self.fatal_error_encountered: if self.fatal_error_encountered:
_mbox.exec() _mbox.exec()

View file

@ -48,6 +48,7 @@ async-trait.workspace = true
axum.workspace = true axum.workspace = true
axum-client-ip.workspace = true axum-client-ip.workspace = true
axum-extra.workspace = true axum-extra.workspace = true
bitflags.workspace = true
blake3.workspace = true blake3.workspace = true
bytes.workspace = true bytes.workspace = true
chrono.workspace = true chrono.workspace = true

View file

@ -25,6 +25,9 @@ use crate::latex::contains_latex;
use crate::template::RenderContext; use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities; use crate::text::strip_html_preserving_entities;
static CLOZE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap());
static MATHJAX: LazyLock<Regex> = LazyLock::new(|| { static MATHJAX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new( Regex::new(
r"(?xsi) r"(?xsi)
@ -453,6 +456,10 @@ pub fn cloze_number_in_fields(fields: impl IntoIterator<Item: AsRef<str>>) -> Ha
set set
} }
pub(crate) fn strip_clozes(text: &str) -> Cow<'_, str> {
CLOZE.replace_all(text, "$1")
}
fn strip_html_inside_mathjax(text: &str) -> Cow<str> { fn strip_html_inside_mathjax(text: &str) -> Cow<str> {
MATHJAX.replace_all(text, |caps: &Captures| -> String { MATHJAX.replace_all(text, |caps: &Captures| -> String {
format!( format!(
@ -610,6 +617,16 @@ mod test {
); );
} }
#[test]
fn strip_clozes_regex() {
assert_eq!(
strip_clozes(
r#"The {{c1::moon::🌛}} {{c2::orbits::this hint has "::" in it}} the {{c3::🌏}}."#
),
"The moon orbits the 🌏."
);
}
#[test] #[test]
fn mathjax_html() { fn mathjax_html() {
// escaped angle brackets should be preserved // escaped angle brackets should be preserved

View file

@ -299,6 +299,33 @@ impl Collection {
.is_ok() .is_ok()
})?) })?)
} }
pub fn evaluate_params_legacy(
&mut self,
params: &Params,
search: &str,
ignore_revlogs_before: TimestampMillis,
) -> Result<ModelEvaluation> {
let timing = self.timing_today()?;
let mut anki_progress = self.new_progress_handler::<ComputeParamsProgress>();
let guard = self.search_cards_into_table(search, SortMode::NoOrder)?;
let revlogs: Vec<RevlogEntry> = guard
.col
.storage
.get_revlog_entries_for_searched_cards_in_card_order()?;
let (items, review_count) =
fsrs_items_for_training(revlogs, timing.next_day_at, ignore_revlogs_before);
anki_progress.state.reviews = review_count as u32;
let fsrs = FSRS::new(Some(params))?;
Ok(fsrs.evaluate(items, |ip| {
anki_progress
.update(false, |p| {
p.total_iterations = ip.total as u32;
p.current_iteration = ip.current as u32;
})
.is_ok()
})?)
}
} }
#[derive(Default, Clone, Copy, Debug)] #[derive(Default, Clone, Copy, Debug)]

View file

@ -1,9 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use anki_proto::scheduler::simulate_fsrs_review_request::cmrr_target::Kind;
use anki_proto::scheduler::SimulateFsrsReviewRequest; use anki_proto::scheduler::SimulateFsrsReviewRequest;
use fsrs::extract_simulator_config; use fsrs::extract_simulator_config;
use fsrs::SimulationResult;
use fsrs::SimulatorConfig; use fsrs::SimulatorConfig;
use fsrs::FSRS; use fsrs::FSRS;
@ -16,115 +14,14 @@ pub struct ComputeRetentionProgress {
pub total: u32, pub total: u32,
} }
pub fn average_r_power_forgetting_curve(
learn_span: usize,
cards: &[fsrs::Card],
offset: f32,
decay: f32,
) -> f32 {
let factor = 0.9_f32.powf(1.0 / decay) - 1.0;
let exp = decay + 1.0;
let den_factor = factor * exp;
// Closure equivalent to the inner integral function
let integral_calc = |card: &fsrs::Card| -> f32 {
// Performs element-wise: (s / den_factor) * (1.0 + factor * t / s).powf(exp)
let t1 = learn_span as f32 - card.last_date;
let t2 = t1 + offset;
(card.stability / den_factor) * (1.0 + factor * t2 / card.stability).powf(exp)
- (card.stability / den_factor) * (1.0 + factor * t1 / card.stability).powf(exp)
};
// Calculate integral difference and divide by time difference element-wise
cards.iter().map(integral_calc).sum::<f32>() / offset
}
impl Collection { impl Collection {
pub fn compute_optimal_retention(&mut self, req: SimulateFsrsReviewRequest) -> Result<f32> { pub fn compute_optimal_retention(&mut self, req: SimulateFsrsReviewRequest) -> Result<f32> {
// Helper macro to wrap the closure for "CMRRTargetFn"s
macro_rules! wrap {
($f:expr) => {
Some(fsrs::CMRRTargetFn(std::sync::Arc::new($f)))
};
}
let target_type = req.target.unwrap().kind;
let days_to_simulate = req.days_to_simulate as f32;
let target = match target_type {
Some(Kind::Memorized(_)) => None,
Some(Kind::FutureMemorized(settings)) => {
wrap!(move |SimulationResult {
cards,
cost_per_day,
..
},
w| {
let total_cost = cost_per_day.iter().sum::<f32>();
total_cost
/ cards.iter().fold(0., |p, c| {
c.retention_on(w, days_to_simulate + settings.days as f32) + p
})
})
}
Some(Kind::AverageFutureMemorized(settings)) => {
wrap!(move |SimulationResult {
cards,
cost_per_day,
..
},
w| {
let total_cost = cost_per_day.iter().sum::<f32>();
total_cost
/ average_r_power_forgetting_curve(
days_to_simulate as usize,
cards,
settings.days as f32,
-w[20],
)
})
}
Some(Kind::Stability(_)) => {
wrap!(move |SimulationResult {
cards,
cost_per_day,
..
},
w| {
let total_cost = cost_per_day.iter().sum::<f32>();
total_cost
/ cards.iter().fold(0., |p, c| {
p + (c.retention_on(w, days_to_simulate) * c.stability)
})
})
}
None => None,
};
let mut anki_progress = self.new_progress_handler::<ComputeRetentionProgress>(); let mut anki_progress = self.new_progress_handler::<ComputeRetentionProgress>();
let fsrs = FSRS::new(None)?; let fsrs = FSRS::new(None)?;
if req.days_to_simulate == 0 { if req.days_to_simulate == 0 {
invalid_input!("no days to simulate") invalid_input!("no days to simulate")
} }
let (mut config, cards) = self.simulate_request_to_config(&req)?; let (config, cards) = self.simulate_request_to_config(&req)?;
if let Some(Kind::Memorized(settings)) = target_type {
let loss_aversion = settings.loss_aversion;
config.relearning_step_transitions[0][0] *= loss_aversion;
config.relearning_step_transitions[1][0] *= loss_aversion;
config.relearning_step_transitions[2][0] *= loss_aversion;
config.learning_step_transitions[0][0] *= loss_aversion;
config.learning_step_transitions[1][0] *= loss_aversion;
config.learning_step_transitions[2][0] *= loss_aversion;
config.state_rating_costs[0][0] *= loss_aversion;
config.state_rating_costs[1][0] *= loss_aversion;
config.state_rating_costs[2][0] *= loss_aversion;
}
Ok(fsrs Ok(fsrs
.optimal_retention( .optimal_retention(
&config, &config,
@ -137,7 +34,7 @@ impl Collection {
.is_ok() .is_ok()
}, },
Some(cards), Some(cards),
target, None,
)? )?
.clamp(0.7, 0.95)) .clamp(0.7, 0.95))
} }

View file

@ -307,6 +307,21 @@ impl crate::services::SchedulerService for Collection {
}) })
} }
fn evaluate_params_legacy(
&mut self,
input: scheduler::EvaluateParamsLegacyRequest,
) -> Result<scheduler::EvaluateParamsResponse> {
let ret = self.evaluate_params_legacy(
&input.params,
&input.search,
input.ignore_revlogs_before_ms.into(),
)?;
Ok(scheduler::EvaluateParamsResponse {
log_loss: ret.log_loss,
rmse_bins: ret.rmse_bins,
})
}
fn get_optimal_retention_parameters( fn get_optimal_retention_parameters(
&mut self, &mut self,
input: scheduler::GetOptimalRetentionParametersRequest, input: scheduler::GetOptimalRetentionParametersRequest,

View file

@ -94,6 +94,7 @@ pub enum SearchNode {
WholeCollection, WholeCollection,
Regex(String), Regex(String),
NoCombining(String), NoCombining(String),
StripClozes(String),
WordBoundary(String), WordBoundary(String),
CustomData(String), CustomData(String),
Preset(String), Preset(String),
@ -358,6 +359,7 @@ fn search_node_for_text_with_argument<'a>(
"cid" => SearchNode::CardIds(check_id_list(val, key)?.into()), "cid" => SearchNode::CardIds(check_id_list(val, key)?.into()),
"re" => SearchNode::Regex(unescape_quotes(val)), "re" => SearchNode::Regex(unescape_quotes(val)),
"nc" => SearchNode::NoCombining(unescape(val)?), "nc" => SearchNode::NoCombining(unescape(val)?),
"sc" => SearchNode::StripClozes(unescape(val)?),
"w" => SearchNode::WordBoundary(unescape(val)?), "w" => SearchNode::WordBoundary(unescape(val)?),
"dupe" => parse_dupe(val)?, "dupe" => parse_dupe(val)?,
"has-cd" => SearchNode::CustomData(unescape(val)?), "has-cd" => SearchNode::CustomData(unescape(val)?),

View file

@ -22,6 +22,7 @@ use crate::notes::field_checksum;
use crate::notetype::NotetypeId; use crate::notetype::NotetypeId;
use crate::prelude::*; use crate::prelude::*;
use crate::storage::ids_to_string; use crate::storage::ids_to_string;
use crate::storage::ProcessTextFlags;
use crate::text::glob_matcher; use crate::text::glob_matcher;
use crate::text::is_glob; use crate::text::is_glob;
use crate::text::normalize_to_nfc; use crate::text::normalize_to_nfc;
@ -134,6 +135,7 @@ impl SqlWriter<'_> {
self.write_unqualified( self.write_unqualified(
text, text,
self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch), self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch),
false,
)? )?
} }
SearchNode::SingleField { field, text, is_re } => { SearchNode::SingleField { field, text, is_re } => {
@ -143,7 +145,14 @@ impl SqlWriter<'_> {
self.write_dupe(*notetype_id, &self.norm_note(text))? self.write_dupe(*notetype_id, &self.norm_note(text))?
} }
SearchNode::Regex(re) => self.write_regex(&self.norm_note(re), false)?, SearchNode::Regex(re) => self.write_regex(&self.norm_note(re), false)?,
SearchNode::NoCombining(text) => self.write_unqualified(&self.norm_note(text), true)?, SearchNode::NoCombining(text) => {
self.write_unqualified(&self.norm_note(text), true, false)?
}
SearchNode::StripClozes(text) => self.write_unqualified(
&self.norm_note(text),
self.col.get_config_bool(BoolKey::IgnoreAccentsInSearch),
true,
)?,
SearchNode::WordBoundary(text) => self.write_word_boundary(&self.norm_note(text))?, SearchNode::WordBoundary(text) => self.write_word_boundary(&self.norm_note(text))?,
// other // other
@ -190,7 +199,12 @@ impl SqlWriter<'_> {
Ok(()) Ok(())
} }
fn write_unqualified(&mut self, text: &str, no_combining: bool) -> Result<()> { fn write_unqualified(
&mut self,
text: &str,
no_combining: bool,
strip_clozes: bool,
) -> Result<()> {
let text = to_sql(text); let text = to_sql(text);
let text = if no_combining { let text = if no_combining {
without_combining(&text) without_combining(&text)
@ -202,17 +216,37 @@ impl SqlWriter<'_> {
self.args.push(text); self.args.push(text);
let arg_idx = self.args.len(); let arg_idx = self.args.len();
let sfld_expr = if no_combining { let mut process_text_flags = ProcessTextFlags::empty();
"coalesce(without_combining(cast(n.sfld as text)), n.sfld)" if no_combining {
process_text_flags.insert(ProcessTextFlags::NoCombining);
}
if strip_clozes {
process_text_flags.insert(ProcessTextFlags::StripClozes);
}
let (sfld_expr, flds_expr) = if !process_text_flags.is_empty() {
let bits = process_text_flags.bits();
(
Cow::from(format!(
"coalesce(process_text(cast(n.sfld as text), {bits}), n.sfld)"
)),
Cow::from(format!("coalesce(process_text(n.flds, {bits}), n.flds)")),
)
} else { } else {
"n.sfld" (Cow::from("n.sfld"), Cow::from("n.flds"))
};
let flds_expr = if no_combining {
"coalesce(without_combining(n.flds), n.flds)"
} else {
"n.flds"
}; };
if strip_clozes {
let cloze_notetypes_only_clause = self
.col
.get_all_notetypes()?
.iter()
.filter(|nt| nt.is_cloze())
.map(|nt| format!("n.mid = {}", nt.id))
.join(" or ");
write!(self.sql, "({cloze_notetypes_only_clause}) and ").unwrap();
}
if let Some(field_indicies_by_notetype) = self.included_fields_by_notetype()? { if let Some(field_indicies_by_notetype) = self.included_fields_by_notetype()? {
let field_idx_str = format!("' || ?{arg_idx} || '"); let field_idx_str = format!("' || ?{arg_idx} || '");
let other_idx_str = "%".to_string(); let other_idx_str = "%".to_string();
@ -803,9 +837,12 @@ impl SqlWriter<'_> {
fn write_regex(&mut self, word: &str, no_combining: bool) -> Result<()> { fn write_regex(&mut self, word: &str, no_combining: bool) -> Result<()> {
let flds_expr = if no_combining { let flds_expr = if no_combining {
"coalesce(without_combining(n.flds), n.flds)" Cow::from(format!(
"coalesce(process_text(n.flds, {}), n.flds)",
ProcessTextFlags::NoCombining.bits()
))
} else { } else {
"n.flds" Cow::from("n.flds")
}; };
let word = if no_combining { let word = if no_combining {
without_combining(word) without_combining(word)
@ -995,6 +1032,7 @@ impl SearchNode {
SearchNode::Duplicates { .. } => RequiredTable::Notes, SearchNode::Duplicates { .. } => RequiredTable::Notes,
SearchNode::Regex(_) => RequiredTable::Notes, SearchNode::Regex(_) => RequiredTable::Notes,
SearchNode::NoCombining(_) => RequiredTable::Notes, SearchNode::NoCombining(_) => RequiredTable::Notes,
SearchNode::StripClozes(_) => RequiredTable::Notes,
SearchNode::WordBoundary(_) => RequiredTable::Notes, SearchNode::WordBoundary(_) => RequiredTable::Notes,
SearchNode::NotetypeId(_) => RequiredTable::Notes, SearchNode::NotetypeId(_) => RequiredTable::Notes,
SearchNode::Notetype(_) => RequiredTable::Notes, SearchNode::Notetype(_) => RequiredTable::Notes,
@ -1299,6 +1337,9 @@ c.odue != 0 then c.odue else c.due end) != {days}) or (c.queue in (1,4) and
"((c.did in (1) or c.odid in (1)))" "((c.did in (1) or c.odid in (1)))"
); );
assert_eq!(&s(ctx, "preset:typo").0, "(false)"); assert_eq!(&s(ctx, "preset:typo").0, "(false)");
// strip clozes
assert_eq!(&s(ctx, "sc:abcdef").0, "((n.mid = 1581236385343) and (coalesce(process_text(cast(n.sfld as text), 2), n.sfld) like ?1 escape '\\' or coalesce(process_text(n.flds, 2), n.flds) like ?1 escape '\\'))");
} }
#[test] #[test]

View file

@ -91,6 +91,7 @@ fn write_search_node(node: &SearchNode) -> String {
WholeCollection => "deck:*".to_string(), WholeCollection => "deck:*".to_string(),
Regex(s) => maybe_quote(&format!("re:{s}")), Regex(s) => maybe_quote(&format!("re:{s}")),
NoCombining(s) => maybe_quote(&format!("nc:{s}")), NoCombining(s) => maybe_quote(&format!("nc:{s}")),
StripClozes(s) => maybe_quote(&format!("sc:{s}")),
WordBoundary(s) => maybe_quote(&format!("w:{s}")), WordBoundary(s) => maybe_quote(&format!("w:{s}")),
CustomData(k) => maybe_quote(&format!("has-cd:{k}")), CustomData(k) => maybe_quote(&format!("has-cd:{k}")),
Preset(s) => maybe_quote(&format!("preset:{s}")), Preset(s) => maybe_quote(&format!("preset:{s}")),

View file

@ -19,6 +19,7 @@ mod upgrades;
use std::fmt::Write; use std::fmt::Write;
pub(crate) use sqlite::ProcessTextFlags;
pub(crate) use sqlite::SqliteStorage; pub(crate) use sqlite::SqliteStorage;
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]

View file

@ -9,6 +9,7 @@ use std::hash::Hasher;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
use bitflags::bitflags;
use fnv::FnvHasher; use fnv::FnvHasher;
use fsrs::FSRS; use fsrs::FSRS;
use fsrs::FSRS5_DEFAULT_DECAY; use fsrs::FSRS5_DEFAULT_DECAY;
@ -24,6 +25,7 @@ use super::upgrades::SCHEMA_MAX_VERSION;
use super::upgrades::SCHEMA_MIN_VERSION; use super::upgrades::SCHEMA_MIN_VERSION;
use super::upgrades::SCHEMA_STARTING_VERSION; use super::upgrades::SCHEMA_STARTING_VERSION;
use super::SchemaVersion; use super::SchemaVersion;
use crate::cloze::strip_clozes;
use crate::config::schema11::schema11_config_as_string; use crate::config::schema11::schema11_config_as_string;
use crate::error::DbErrorKind; use crate::error::DbErrorKind;
use crate::prelude::*; use crate::prelude::*;
@ -31,6 +33,7 @@ use crate::scheduler::timing::local_minutes_west_for_stamp;
use crate::scheduler::timing::v1_creation_date; use crate::scheduler::timing::v1_creation_date;
use crate::storage::card::data::CardData; use crate::storage::card::data::CardData;
use crate::text::without_combining; use crate::text::without_combining;
use crate::text::CowMapping;
fn unicase_compare(s1: &str, s2: &str) -> Ordering { fn unicase_compare(s1: &str, s2: &str) -> Ordering {
UniCase::new(s1).cmp(&UniCase::new(s2)) UniCase::new(s1).cmp(&UniCase::new(s2))
@ -74,7 +77,7 @@ fn open_or_create_collection_db(path: &Path) -> Result<Connection> {
add_regexp_function(&db)?; add_regexp_function(&db)?;
add_regexp_fields_function(&db)?; add_regexp_fields_function(&db)?;
add_regexp_tags_function(&db)?; add_regexp_tags_function(&db)?;
add_without_combining_function(&db)?; add_process_text_function(&db)?;
add_fnvhash_function(&db)?; add_fnvhash_function(&db)?;
add_extract_original_position_function(&db)?; add_extract_original_position_function(&db)?;
add_extract_custom_data_function(&db)?; add_extract_custom_data_function(&db)?;
@ -111,17 +114,28 @@ fn add_field_index_function(db: &Connection) -> rusqlite::Result<()> {
) )
} }
fn add_without_combining_function(db: &Connection) -> rusqlite::Result<()> { bitflags! {
pub(crate) struct ProcessTextFlags: u8 {
const NoCombining = 1;
const StripClozes = 1 << 1;
}
}
fn add_process_text_function(db: &Connection) -> rusqlite::Result<()> {
db.create_scalar_function( db.create_scalar_function(
"without_combining", "process_text",
1, 2,
FunctionFlags::SQLITE_DETERMINISTIC, FunctionFlags::SQLITE_DETERMINISTIC,
|ctx| { |ctx| {
let text = ctx.get_raw(0).as_str()?; let mut text = Cow::from(ctx.get_raw(0).as_str()?);
Ok(match without_combining(text) { let opt = ProcessTextFlags::from_bits_truncate(ctx.get_raw(1).as_i64()? as u8);
Cow::Borrowed(_) => None, if opt.contains(ProcessTextFlags::StripClozes) {
Cow::Owned(o) => Some(o), text = text.map_cow(strip_clozes);
}) }
if opt.contains(ProcessTextFlags::NoCombining) {
text = text.map_cow(without_combining);
}
Ok(text.get_owned())
}, },
) )
} }

View file

@ -95,8 +95,8 @@
"repository": "https://github.com/TooTallNate/node-agent-base", "repository": "https://github.com/TooTallNate/node-agent-base",
"publisher": "Nathan Rajlich", "publisher": "Nathan Rajlich",
"email": "nathan@tootallnate.net", "email": "nathan@tootallnate.net",
"path": "node_modules/http-proxy-agent/node_modules/agent-base", "path": "node_modules/https-proxy-agent/node_modules/agent-base",
"licenseFile": "node_modules/http-proxy-agent/node_modules/agent-base/README.md" "licenseFile": "node_modules/https-proxy-agent/node_modules/agent-base/README.md"
}, },
"asynckit@0.4.0": { "asynckit@0.4.0": {
"licenses": "MIT", "licenses": "MIT",

View file

@ -7,11 +7,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
ComputeRetentionProgress, ComputeRetentionProgress,
type ComputeParamsProgress, type ComputeParamsProgress,
} from "@generated/anki/collection_pb"; } from "@generated/anki/collection_pb";
import { import { SimulateFsrsReviewRequest } from "@generated/anki/scheduler_pb";
SimulateFsrsReviewRequest,
SimulateFsrsReviewRequest_CMRRTarget,
SimulateFsrsReviewRequest_CMRRTarget_Memorized,
} from "@generated/anki/scheduler_pb";
import { import {
computeFsrsParams, computeFsrsParams,
evaluateParams, evaluateParams,
@ -99,14 +95,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
newCardsIgnoreReviewLimit: $newCardsIgnoreReviewLimit, newCardsIgnoreReviewLimit: $newCardsIgnoreReviewLimit,
easyDaysPercentages: $config.easyDaysPercentages, easyDaysPercentages: $config.easyDaysPercentages,
reviewOrder: $config.reviewOrder, reviewOrder: $config.reviewOrder,
target: new SimulateFsrsReviewRequest_CMRRTarget({
kind: {
case: "memorized",
value: new SimulateFsrsReviewRequest_CMRRTarget_Memorized({
lossAversion: 1.6,
}),
},
}),
}); });
const DESIRED_RETENTION_LOW_THRESHOLD = 0.8; const DESIRED_RETENTION_LOW_THRESHOLD = 0.8;

View file

@ -18,30 +18,21 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import { renderSimulationChart } from "../graphs/simulator"; import { renderSimulationChart } from "../graphs/simulator";
import { computeOptimalRetention, simulateFsrsReview } from "@generated/backend"; import { computeOptimalRetention, simulateFsrsReview } from "@generated/backend";
import { runWithBackendProgress } from "@tslib/progress"; import { runWithBackendProgress } from "@tslib/progress";
import { import type {
SimulateFsrsReviewRequest_CMRRTarget_AverageFutureMemorized, ComputeOptimalRetentionResponse,
SimulateFsrsReviewRequest_CMRRTarget_FutureMemorized, SimulateFsrsReviewRequest,
SimulateFsrsReviewRequest_CMRRTarget_Memorized, SimulateFsrsReviewResponse,
SimulateFsrsReviewRequest_CMRRTarget_Stability,
type ComputeOptimalRetentionResponse,
type SimulateFsrsReviewRequest,
type SimulateFsrsReviewResponse,
} from "@generated/anki/scheduler_pb"; } from "@generated/anki/scheduler_pb";
import type { DeckOptionsState } from "./lib"; import type { DeckOptionsState } from "./lib";
import SwitchRow from "$lib/components/SwitchRow.svelte"; import SwitchRow from "$lib/components/SwitchRow.svelte";
import GlobalLabel from "./GlobalLabel.svelte"; import GlobalLabel from "./GlobalLabel.svelte";
import SpinBoxFloatRow from "./SpinBoxFloatRow.svelte"; import SpinBoxFloatRow from "./SpinBoxFloatRow.svelte";
import { import { reviewOrderChoices } from "./choices";
DEFAULT_CMRR_TARGET,
CMRRTargetChoices,
reviewOrderChoices,
} from "./choices";
import EnumSelectorRow from "$lib/components/EnumSelectorRow.svelte"; import EnumSelectorRow from "$lib/components/EnumSelectorRow.svelte";
import { DeckConfig_Config_LeechAction } from "@generated/anki/deck_config_pb"; import { DeckConfig_Config_LeechAction } from "@generated/anki/deck_config_pb";
import EasyDaysInput from "./EasyDaysInput.svelte"; import EasyDaysInput from "./EasyDaysInput.svelte";
import Warning from "./Warning.svelte"; import Warning from "./Warning.svelte";
import type { ComputeRetentionProgress } from "@generated/anki/collection_pb"; import type { ComputeRetentionProgress } from "@generated/anki/collection_pb";
import Item from "$lib/components/Item.svelte";
import Modal from "bootstrap/js/dist/modal"; import Modal from "bootstrap/js/dist/modal";
export let state: DeckOptionsState; export let state: DeckOptionsState;
@ -50,45 +41,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
export let openHelpModal: (key: string) => void; export let openHelpModal: (key: string) => void;
export let onPresetChange: () => void; export let onPresetChange: () => void;
let cmrrTargetType = DEFAULT_CMRR_TARGET;
// All added types must be updated in the proceeding switch statement.
let lastCmrrTargetType = cmrrTargetType;
$: if (simulateFsrsRequest?.target && cmrrTargetType !== lastCmrrTargetType) {
switch (cmrrTargetType) {
case "memorized":
simulateFsrsRequest.target.kind = {
case: "memorized",
value: new SimulateFsrsReviewRequest_CMRRTarget_Memorized({
lossAversion: 1.6,
}),
};
break;
case "stability":
simulateFsrsRequest.target.kind = {
case: "stability",
value: new SimulateFsrsReviewRequest_CMRRTarget_Stability({}),
};
break;
case "futureMemorized":
simulateFsrsRequest.target.kind = {
case: "futureMemorized",
value: new SimulateFsrsReviewRequest_CMRRTarget_FutureMemorized({
days: 365,
}),
};
break;
case "averageFutureMemorized":
simulateFsrsRequest.target.kind = {
case: "averageFutureMemorized",
value: new SimulateFsrsReviewRequest_CMRRTarget_AverageFutureMemorized(
{ days: 365 },
),
};
break;
}
lastCmrrTargetType = cmrrTargetType;
}
const config = state.currentConfig; const config = state.currentConfig;
let simulateSubgraph: SimulateSubgraph = SimulateSubgraph.count; let simulateSubgraph: SimulateSubgraph = SimulateSubgraph.count;
let tableData: TableDatum[] = []; let tableData: TableDatum[] = [];
@ -443,71 +395,38 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
{/if} {/if}
</details> </details>
<details> <div style="display:none;">
<summary>{tr.deckConfigComputeOptimalRetention()}</summary> <details>
<button <summary>{tr.deckConfigComputeOptimalRetention()}</summary>
class="btn {computingRetention ? 'btn-warning' : 'btn-primary'}" <button
disabled={!computingRetention && computing} class="btn {computingRetention
on:click={() => computeRetention()} ? 'btn-warning'
> : 'btn-primary'}"
disabled={!computingRetention && computing}
on:click={() => computeRetention()}
>
{#if computingRetention}
{tr.actionsCancel()}
{:else}
{tr.deckConfigComputeButton()}
{/if}
</button>
{#if optimalRetention}
{estimatedRetention(optimalRetention)}
{#if optimalRetention - $config.desiredRetention >= 0.01}
<Warning
warning={tr.deckConfigDesiredRetentionBelowOptimal()}
className="alert-warning"
/>
{/if}
{/if}
{#if computingRetention} {#if computingRetention}
{tr.actionsCancel()} <div>{computeRetentionProgressString}</div>
{:else}
{tr.deckConfigComputeButton()}
{/if} {/if}
</button> </details>
</div>
{#if optimalRetention}
{estimatedRetention(optimalRetention)}
{#if optimalRetention - $config.desiredRetention >= 0.01}
<Warning
warning={tr.deckConfigDesiredRetentionBelowOptimal()}
className="alert-warning"
/>
{/if}
{/if}
{#if computingRetention}
<div>{computeRetentionProgressString}</div>
{/if}
<Item>
<EnumSelectorRow
choices={CMRRTargetChoices()}
bind:value={cmrrTargetType}
defaultValue={DEFAULT_CMRR_TARGET}
>
<SettingTitle>
{"Target: "}
</SettingTitle>
</EnumSelectorRow>
</Item>
{#if simulateFsrsRequest.target?.kind.case === "memorized"}
<SpinBoxFloatRow
bind:value={simulateFsrsRequest.target.kind.value
.lossAversion}
defaultValue={1.6}
>
<SettingTitle>
{"Fail Cost Multiplier: "}
</SettingTitle>
</SpinBoxFloatRow>
{/if}
{#if simulateFsrsRequest.target?.kind.case === "futureMemorized" || simulateFsrsRequest.target?.kind.case === "averageFutureMemorized"}
<SpinBoxFloatRow
bind:value={simulateFsrsRequest.target.kind.value.days}
defaultValue={365}
step={1}
>
<SettingTitle>
{"Days after simulation end: "}
</SettingTitle>
</SpinBoxFloatRow>
{/if}
</details>
<button <button
class="btn {computing ? 'btn-warning' : 'btn-primary'}" class="btn {computing ? 'btn-warning' : 'btn-primary'}"
disabled={computing} disabled={computing}

View file

@ -199,29 +199,6 @@ export function questionActionChoices(): Choice<DeckConfig_Config_QuestionAction
]; ];
} }
export const DEFAULT_CMRR_TARGET = "memorized";
export function CMRRTargetChoices(): Choice<string>[] {
return [
{
label: "Memorized (Default)",
value: "memorized",
},
{
label: "Stability (Experimental)",
value: "stability",
},
{
label: "Post Abandon Memorized (Experimental)",
value: "futureMemorized",
},
{
label: "Average Post Abandon Memorized (Experimental)",
value: "averageFutureMemorized",
},
];
}
function difficultyOrders(fsrs: boolean): Choice<DeckConfig_Config_ReviewCardOrder>[] { function difficultyOrders(fsrs: boolean): Choice<DeckConfig_Config_ReviewCardOrder>[] {
const order = [ const order = [
{ {

View file

@ -4,7 +4,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
--> -->
<script lang="ts"> <script lang="ts">
import { bridgeCommand } from "@tslib/bridgecommand"; import { bridgeCommand } from "@tslib/bridgecommand";
import type { SvelteComponent } from "svelte"; import type { Component } from "svelte";
import { writable } from "svelte/store"; import { writable } from "svelte/store";
import { pageTheme } from "$lib/sveltelib/theme"; import { pageTheme } from "$lib/sveltelib/theme";
@ -18,9 +18,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
const search = writable(initialSearch); const search = writable(initialSearch);
const days = writable(initialDays); const days = writable(initialDays);
export let graphs: (typeof SvelteComponent<any>)[]; export let graphs: Component<any>[];
/** See RangeBox */ /** See RangeBox */
export let controller: typeof SvelteComponent<any> | null = RangeBox; export let controller: Component<any> | null = RangeBox;
function browserSearch(event: CustomEvent) { function browserSearch(event: CustomEvent) {
bridgeCommand(`browserSearch: ${$search} ${event.detail.query}`); bridgeCommand(`browserSearch: ${$search} ${event.detail.query}`);

View file

@ -9,18 +9,18 @@ import "./graphs-base.scss";
import { ModuleName, setupI18n } from "@tslib/i18n"; import { ModuleName, setupI18n } from "@tslib/i18n";
import { checkNightMode } from "@tslib/nightmode"; import { checkNightMode } from "@tslib/nightmode";
import type { SvelteComponent } from "svelte"; import type { Component } from "svelte";
import GraphsPage from "./GraphsPage.svelte"; import GraphsPage from "./GraphsPage.svelte";
const i18n = setupI18n({ modules: [ModuleName.STATISTICS, ModuleName.SCHEDULING] }); const i18n = setupI18n({ modules: [ModuleName.STATISTICS, ModuleName.SCHEDULING] });
export async function setupGraphs( export async function setupGraphs(
graphs: typeof SvelteComponent<any>[], graphs: Component<any>[],
{ {
search = "deck:current", search = "deck:current",
days = 365, days = 365,
controller = null satisfies typeof SvelteComponent<any> | null, controller = null satisfies Component<any> | null,
} = {}, } = {},
): Promise<GraphsPage> { ): Promise<GraphsPage> {
checkNightMode(); checkNightMode();

1432
yarn.lock

File diff suppressed because it is too large Load diff