Mirror of https://github.com/ankitects/anki.git (synced 2025-11-12 15:47:12 -05:00)

Commit 0241113e2c: Merge branch 'main' into editor-3830
30 changed files with 762 additions and 265 deletions
.version (2 changed lines)
@@ -1 +1 @@
-25.06b4
+25.06b5
Cargo.lock (generated, 13 changed lines)
@@ -117,7 +117,7 @@ dependencies = [
 "id_tree",
 "inflections",
 "itertools 0.14.0",
-"nom",
+"nom 8.0.0",
 "num_cpus",
 "num_enum",
 "once_cell",
@@ -4117,6 +4117,15 @@ dependencies = [
 "minimal-lexical",
 ]

+[[package]]
+name = "nom"
+version = "8.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "normpath"
 version = "1.3.0"
@@ -6258,7 +6267,7 @@ dependencies = [
 "bytesize",
 "lazy_static",
 "libc",
-"nom",
+"nom 7.1.3",
 "time",
 "winapi",
 ]
@@ -93,7 +93,7 @@ junction = "1.2.0"
 libc = "0.2"
 libc-stdhandle = "0.1"
 maplit = "1.0.2"
-nom = "7.1.3"
+nom = "8.0.0"
 num-format = "0.4.4"
 num_cpus = "1.17.0"
 num_enum = "0.7.3"
@@ -138,7 +138,7 @@ unic-ucd-category = "0.9.0"
 unicode-normalization = "0.1.24"
 walkdir = "2.5.0"
 which = "8.0.0"
-winapi = { version = "0.3", features = ["wincon"] }
+winapi = { version = "0.3", features = ["wincon", "errhandlingapi", "consoleapi"] }
 windows = { version = "0.61.3", features = ["Media_SpeechSynthesis", "Media_Core", "Foundation_Collections", "Storage_Streams"] }
 wiremock = "0.6.3"
 xz2 = "0.1.7"
@@ -2645,6 +2645,15 @@
 "license_file": null,
 "description": "A byte-oriented, zero-copy, parser combinators library"
 },
+{
+"name": "nom",
+"version": "8.0.0",
+"authors": "contact@geoffroycouprie.com",
+"repository": "https://github.com/rust-bakery/nom",
+"license": "MIT",
+"license_file": null,
+"description": "A byte-oriented, zero-copy, parser combinators library"
+},
 {
 "name": "ntapi",
 "version": "0.4.1",
@@ -23,47 +23,45 @@ def first_run_setup() -> None:
 if not is_mac:
 return

-def _dot():
+# Import anki_audio first and spawn commands
-print(".", flush=True, end="")

-_dot()
-import anki.collection

-_dot()
-import PyQt6.sip

-_dot()
-import PyQt6.QtCore

-_dot()
-import PyQt6.QtGui

-_dot()
-import PyQt6.QtNetwork

-_dot()
-import PyQt6.QtQuick

-_dot()
-import PyQt6.QtWebChannel

-_dot()
-import PyQt6.QtWebEngineCore

-_dot()
-import PyQt6.QtWebEngineWidgets

-_dot()
 import anki_audio
-import PyQt6.QtWidgets

 audio_pkg_path = Path(anki_audio.__file__).parent

-# Invoke mpv and lame
+# Start mpv and lame commands concurrently
-cmd = [Path(""), "--version"]
+processes = []
 for cmd_name in ["mpv", "lame"]:
-_dot()
+cmd_path = audio_pkg_path / cmd_name
-cmd[0] = audio_pkg_path / cmd_name
+proc = subprocess.Popen(
-subprocess.run([str(cmd[0]), str(cmd[1])], check=True, capture_output=True)
+[str(cmd_path), "--version"],
+stdout=subprocess.DEVNULL,
+stderr=subprocess.DEVNULL,
+)
+processes.append(proc)

-print()
+# Continue with other imports while commands run
+import concurrent.futures
+
+import bs4
+import flask
+import flask_cors
+import markdown
+import PyQt6.QtCore
+import PyQt6.QtGui
+import PyQt6.QtNetwork
+import PyQt6.QtQuick
+import PyQt6.QtWebChannel
+import PyQt6.QtWebEngineCore
+import PyQt6.QtWebEngineWidgets
+import PyQt6.QtWidgets
+import PyQt6.sip
+import requests
+import waitress
+
+import anki.collection
+
+from . import _macos_helper
+
+# Wait for both commands to complete
+for proc in processes:
+proc.wait()
@@ -87,6 +87,7 @@ class TopWebView(ToolbarWebView):
 else:
 self.flatten()

+self.adjustHeightToFit()
 self.show()

 def _onHeight(self, qvar: int | None) -> None:
@@ -1,9 +1,15 @@
 #!/bin/bash
+#
+# This script currently only supports universal builds on x86_64.
+#

 set -e

 # Add Linux cross-compilation target
 rustup target add aarch64-unknown-linux-gnu
+# Detect host architecture
+HOST_ARCH=$(uname -m)


 # Define output paths
 OUTPUT_DIR="../../../out/launcher"
@@ -12,11 +18,18 @@ LAUNCHER_DIR="$OUTPUT_DIR/anki-launcher"
 # Clean existing output directory
 rm -rf "$LAUNCHER_DIR"

-# Build binaries for both Linux architectures
+# Build binaries based on host architecture
+if [ "$HOST_ARCH" = "aarch64" ]; then
+# On aarch64 host, only build for aarch64
+cargo build -p launcher --release --target aarch64-unknown-linux-gnu
+else
+# On other hosts, build for both architectures
 cargo build -p launcher --release --target x86_64-unknown-linux-gnu
 CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \
 cargo build -p launcher --release --target aarch64-unknown-linux-gnu
+# Extract uv_lin_arm for cross-compilation
 (cd ../../.. && ./ninja extract:uv_lin_arm)
+fi

 # Create output directory
 mkdir -p "$LAUNCHER_DIR"
@@ -24,13 +37,21 @@ mkdir -p "$LAUNCHER_DIR"
 # Copy binaries and support files
 TARGET_DIR=${CARGO_TARGET_DIR:-../../../target}

-# Copy launcher binaries with architecture suffixes
+# Copy binaries with architecture suffixes
+if [ "$HOST_ARCH" = "aarch64" ]; then
+# On aarch64 host, copy arm64 binary to both locations
+cp "$TARGET_DIR/aarch64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.amd64"
+cp "$TARGET_DIR/aarch64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.arm64"
+# Copy uv binary to both locations
+cp "../../../out/extracted/uv/uv" "$LAUNCHER_DIR/uv.amd64"
+cp "../../../out/extracted/uv/uv" "$LAUNCHER_DIR/uv.arm64"
+else
+# On other hosts, copy architecture-specific binaries
 cp "$TARGET_DIR/x86_64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.amd64"
 cp "$TARGET_DIR/aarch64-unknown-linux-gnu/release/launcher" "$LAUNCHER_DIR/launcher.arm64"

-# Copy uv binaries with architecture suffixes
 cp "../../../out/extracted/uv/uv" "$LAUNCHER_DIR/uv.amd64"
 cp "../../../out/extracted/uv_lin_arm/uv" "$LAUNCHER_DIR/uv.arm64"
+fi

 # Copy support files from lin directory
 for file in README.md anki.1 anki.desktop anki.png anki.xml anki.xpm install.sh uninstall.sh anki; do
@@ -47,8 +47,8 @@ done
 codesign -vvv "$APP_LAUNCHER"
 spctl -a "$APP_LAUNCHER"

-# Notarize
+# Notarize and bundle (skip if NODMG is set)
+if [ -z "$NODMG" ]; then
 ./notarize.sh "$OUTPUT_DIR"

-# Bundle
 ./dmg/build.sh "$OUTPUT_DIR"
+fi
@@ -114,10 +114,13 @@ fn copy_files(output_dir: &Path) -> Result<()> {
 let launcher_dst = output_dir.join("anki.exe");
 copy_file(&launcher_src, &launcher_dst)?;

-// Copy uv.exe
+// Copy uv.exe and uvw.exe
 let uv_src = PathBuf::from("../../../out/extracted/uv/uv.exe");
 let uv_dst = output_dir.join("uv.exe");
 copy_file(&uv_src, &uv_dst)?;
+let uv_src = PathBuf::from("../../../out/extracted/uv/uvw.exe");
+let uv_dst = output_dir.join("uvw.exe");
+copy_file(&uv_src, &uv_dst)?;

 println!("Copying support files...");
@@ -4,28 +4,48 @@
 #![windows_subsystem = "windows"]

 use std::io::stdin;
+use std::io::stdout;
+use std::io::Write;
 use std::process::Command;
+use std::time::SystemTime;
+use std::time::UNIX_EPOCH;

 use anki_io::copy_if_newer;
 use anki_io::create_dir_all;
 use anki_io::modified_time;
+use anki_io::read_file;
 use anki_io::remove_file;
 use anki_io::write_file;
 use anki_process::CommandExt;
 use anyhow::Context;
 use anyhow::Result;

+use crate::platform::ensure_terminal_shown;
 use crate::platform::exec_anki;
 use crate::platform::get_anki_binary_path;
 use crate::platform::get_exe_and_resources_dirs;
 use crate::platform::get_uv_binary_name;
 use crate::platform::handle_first_launch;
-use crate::platform::handle_terminal_launch;
 use crate::platform::initial_terminal_setup;
 use crate::platform::launch_anki_detached;

 mod platform;

+#[derive(Debug, Clone)]
+pub enum VersionKind {
+PyOxidizer(String),
+Uv(String),
+}
+
+#[derive(Debug, Clone)]
+pub enum MainMenuChoice {
+Latest,
+KeepExisting,
+Version(VersionKind),
+ToggleBetas,
+Quit,
+}
+
 #[derive(Debug, Clone, Default)]
 pub struct Config {
 pub show_console: bool,
@@ -33,6 +53,9 @@ pub struct Config {

 fn main() {
 if let Err(e) = run() {
+let mut config: Config = Config::default();
+initial_terminal_setup(&mut config);
+
 eprintln!("Error: {:#}", e);
 eprintln!("Press enter to close...");
 let mut input = String::new();
@@ -43,8 +66,7 @@ fn main() {
 }

 fn run() -> Result<()> {
-let mut config = Config::default();
+let mut config: Config = Config::default();
-initial_terminal_setup(&mut config);

 let uv_install_root = dirs::data_local_dir()
 .context("Unable to determine data_dir")?
@@ -62,11 +84,16 @@ fn run() -> Result<()> {

 // Create install directory and copy project files in
 create_dir_all(&uv_install_root)?;
+let had_user_pyproj = user_pyproject_path.exists();
+if !had_user_pyproj {
+// during initial launcher testing, enable betas by default
+write_file(&prerelease_marker, "")?;
+}
+
 copy_if_newer(&dist_pyproject_path, &user_pyproject_path)?;
 copy_if_newer(&dist_python_version_path, &user_python_version_path)?;

-let pyproject_has_changed =
+let pyproject_has_changed = !sync_complete_marker.exists() || {
-!user_pyproject_path.exists() || !sync_complete_marker.exists() || {
 let pyproject_toml_time = modified_time(&user_pyproject_path)?;
 let sync_complete_time = modified_time(&sync_complete_marker)?;
 Ok::<bool, anyhow::Error>(pyproject_toml_time > sync_complete_time)
@@ -75,47 +102,304 @@ fn run() -> Result<()> {

 if !pyproject_has_changed {
 // If venv is already up to date, exec as normal
+initial_terminal_setup(&mut config);
 let anki_bin = get_anki_binary_path(&uv_install_root);
 exec_anki(&anki_bin, &config)?;
 return Ok(());
 }

 // we'll need to launch uv; reinvoke ourselves in a terminal so the user can see
-handle_terminal_launch()?;
+ensure_terminal_shown()?;
+print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
+println!("\x1B[1mAnki Launcher\x1B[0m\n");
+
+// Check if there's an existing installation before removing marker
+let has_existing_install = sync_complete_marker.exists();
+
+loop {
+let menu_choice = get_main_menu_choice(has_existing_install, &prerelease_marker);
+
+match menu_choice {
+MainMenuChoice::Quit => std::process::exit(0),
+MainMenuChoice::KeepExisting => {
+// Skip sync, just launch existing installation
+break;
+}
+MainMenuChoice::ToggleBetas => {
+// Toggle beta prerelease file
+if prerelease_marker.exists() {
+let _ = remove_file(&prerelease_marker);
+println!("Beta releases disabled.");
+} else {
+write_file(&prerelease_marker, "")?;
+println!("Beta releases enabled.");
+}
+println!();
+continue;
+}
+_ => {
+// For other choices, update project files and sync
+update_pyproject_for_version(
+menu_choice.clone(),
+dist_pyproject_path.clone(),
+user_pyproject_path.clone(),
+dist_python_version_path.clone(),
+user_python_version_path.clone(),
+)?;
+
 // Remove sync marker before attempting sync
 let _ = remove_file(&sync_complete_marker);

 // Sync the venv
 let mut command = Command::new(&uv_path);
-command
+command.current_dir(&uv_install_root).args([
-.current_dir(&uv_install_root)
+"sync",
-.args(["sync", "--upgrade", "--managed-python"]);
+"--upgrade",
+"--managed-python",
+]);

-// Set UV_PRERELEASE=allow if prerelease file exists
+// Add python version if .python-version file exists
+if user_python_version_path.exists() {
+let python_version = read_file(&user_python_version_path)?;
+let python_version_str = String::from_utf8(python_version)
+.context("Invalid UTF-8 in .python-version")?;
+let python_version_trimmed = python_version_str.trim();
+command.args(["--python", python_version_trimmed]);
+}
+
+// Set UV_PRERELEASE=allow if beta mode is enabled
 if prerelease_marker.exists() {
 command.env("UV_PRERELEASE", "allow");
 }

-// temporarily force it on during initial beta testing
+println!("\x1B[1mUpdating Anki...\x1B[0m\n");
-command.env("UV_PRERELEASE", "allow");

-if let Err(e) = command.ensure_success() {
+match command.ensure_success() {
+Ok(_) => {
+// Sync succeeded, break out of loop
+break;
+}
+Err(e) => {
 // If sync fails due to things like a missing wheel on pypi,
 // we need to remove the lockfile or uv will cache the bad result.
 let _ = remove_file(&uv_lock_path);
-return Err(e.into());
+println!("Install failed: {:#}", e);
+println!();
+continue;
+}
+}
+}
+}
 }

-// Write marker file to indicate successful sync
+// Write marker file to indicate we've completed the sync process
-write_file(&sync_complete_marker, "")?;
+write_sync_marker(&sync_complete_marker)?;

 // First launch
 let anki_bin = get_anki_binary_path(&uv_install_root);
 handle_first_launch(&anki_bin)?;

+println!("\nPress enter to start Anki.");
+
+let mut input = String::new();
+let _ = stdin().read_line(&mut input);
+
 // Then launch the binary as detached subprocess so the terminal can close
 launch_anki_detached(&anki_bin, &config)?;

 Ok(())
 }

+fn write_sync_marker(sync_complete_marker: &std::path::Path) -> Result<()> {
+let timestamp = SystemTime::now()
+.duration_since(UNIX_EPOCH)
+.context("Failed to get system time")?
+.as_secs();
+write_file(sync_complete_marker, timestamp.to_string())?;
+Ok(())
+}
+
+fn get_main_menu_choice(
+has_existing_install: bool,
+prerelease_marker: &std::path::Path,
+) -> MainMenuChoice {
+loop {
+println!("1) Latest Anki (just press enter)");
+println!("2) Choose a version");
+if has_existing_install {
+println!("3) Keep existing version");
+}
+println!();
+
+let betas_enabled = prerelease_marker.exists();
+println!(
+"4) Allow betas: {}",
+if betas_enabled { "on" } else { "off" }
+);
+println!("5) Quit");
+print!("> ");
+let _ = stdout().flush();
+
+let mut input = String::new();
+let _ = stdin().read_line(&mut input);
+let input = input.trim();
+
+println!();
+
+return match input {
+"" | "1" => MainMenuChoice::Latest,
+"2" => MainMenuChoice::Version(get_version_kind()),
+"3" => {
+if has_existing_install {
+MainMenuChoice::KeepExisting
+} else {
+println!("Invalid input. Please try again.\n");
+continue;
+}
+}
+"4" => MainMenuChoice::ToggleBetas,
+"5" => MainMenuChoice::Quit,
+_ => {
+println!("Invalid input. Please try again.");
+continue;
+}
+};
+}
+}
+
+fn get_version_kind() -> VersionKind {
+loop {
+println!("Enter the version you want to install:");
+print!("> ");
+let _ = stdout().flush();
+
+let mut input = String::new();
+let _ = stdin().read_line(&mut input);
+let input = input.trim();
+
+if input.is_empty() {
+println!("Please enter a version.");
+continue;
+}
+
+match parse_version_kind(input) {
+Some(version_kind) => {
+println!();
+return version_kind;
+}
+None => {
+println!("Invalid version format. Please enter a version like 24.10 or 25.06.1 (minimum 2.1.50)");
+continue;
+}
+}
+}
+}
+
+fn update_pyproject_for_version(
+menu_choice: MainMenuChoice,
+dist_pyproject_path: std::path::PathBuf,
+user_pyproject_path: std::path::PathBuf,
+dist_python_version_path: std::path::PathBuf,
+user_python_version_path: std::path::PathBuf,
+) -> Result<()> {
+match menu_choice {
+MainMenuChoice::Latest => {
+let content = read_file(&dist_pyproject_path)?;
+write_file(&user_pyproject_path, &content)?;
+let python_version_content = read_file(&dist_python_version_path)?;
+write_file(&user_python_version_path, &python_version_content)?;
+}
+MainMenuChoice::KeepExisting => {
+// Do nothing - keep existing pyproject.toml and .python-version
+}
+MainMenuChoice::ToggleBetas => {
+// This should not be reached as ToggleBetas is handled in the loop
+unreachable!("ToggleBetas should be handled in the main loop");
+}
+MainMenuChoice::Version(version_kind) => {
+let content = read_file(&dist_pyproject_path)?;
+let content_str =
+String::from_utf8(content).context("Invalid UTF-8 in pyproject.toml")?;
+let updated_content = match &version_kind {
+VersionKind::PyOxidizer(version) => {
+// Replace package name and add PyQt6 dependencies
+content_str.replace(
+"anki-release",
+&format!(
+concat!(
+"aqt[qt6]=={}\",\n",
+" \"pyqt6==6.6.1\",\n",
+" \"pyqt6-qt6==6.6.2\",\n",
+" \"pyqt6-webengine==6.6.0\",\n",
+" \"pyqt6-webengine-qt6==6.6.2\",\n",
+" \"pyqt6_sip==13.6.0"
+),
+version
+),
+)
+}
+VersionKind::Uv(version) => {
+content_str.replace("anki-release", &format!("anki-release=={}", version))
+}
+};
+write_file(&user_pyproject_path, &updated_content)?;
+
+// Update .python-version based on version kind
+match &version_kind {
+VersionKind::PyOxidizer(_) => {
+write_file(&user_python_version_path, "3.9")?;
+}
+VersionKind::Uv(_) => {
+copy_if_newer(&dist_python_version_path, &user_python_version_path)?;
+}
+}
+}
+MainMenuChoice::Quit => {
+std::process::exit(0);
+}
+}
+Ok(())
+}
+
+fn parse_version_kind(version: &str) -> Option<VersionKind> {
+let numeric_chars: String = version
+.chars()
+.filter(|c| c.is_ascii_digit() || *c == '.')
+.collect();
+
+let parts: Vec<&str> = numeric_chars.split('.').collect();
+
+if parts.len() < 2 {
+return None;
+}
+
+let major: u32 = match parts[0].parse() {
+Ok(val) => val,
+Err(_) => return None,
+};
+
+let minor: u32 = match parts[1].parse() {
+Ok(val) => val,
+Err(_) => return None,
+};
+
+let patch: u32 = if parts.len() >= 3 {
+match parts[2].parse() {
+Ok(val) => val,
+Err(_) => return None,
+}
+} else {
+0 // Default patch to 0 if not provided
+};
+
+// Reject versions < 2.1.50
+if major == 2 && (minor != 1 || patch < 50) {
+return None;
+}
+
+if major < 25 || (major == 25 && minor < 6) {
+Some(VersionKind::PyOxidizer(version.to_string()))
+} else {
+Some(VersionKind::Uv(version.to_string()))
+}
+}
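Note: the new `parse_version_kind` helper above decides how a user-entered version is installed: releases before 25.06 are treated as PyOxidizer-era builds (pinned PyQt6 wheels and Python 3.9 are written into the project files), newer releases become a plain `anki-release==X` pin, and 2.1.x versions below 2.1.50 are rejected. A minimal illustrative sketch of those rules, assuming the patch's `parse_version_kind` and `VersionKind` are in scope (the assertions are not part of the patch):

```rust
// Illustrative only: exercises the classification behaviour shown in the diff above.
fn classify_examples() {
    assert!(matches!(parse_version_kind("25.06.1"), Some(VersionKind::Uv(_))));
    assert!(matches!(parse_version_kind("24.10"), Some(VersionKind::PyOxidizer(_))));
    assert!(matches!(parse_version_kind("2.1.50"), Some(VersionKind::PyOxidizer(_))));
    assert!(parse_version_kind("2.1.49").is_none()); // below the 2.1.50 minimum
    assert!(parse_version_kind("x").is_none()); // fewer than two numeric parts
}
```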
@@ -3,6 +3,11 @@

 use std::os::unix::process::CommandExt;
 use std::process::Command;
+use std::sync::atomic::AtomicBool;
+use std::sync::atomic::Ordering;
+use std::sync::Arc;
+use std::thread;
+use std::time::Duration;

 use anki_process::CommandExt as AnkiCommandExt;
 use anyhow::Context;
@@ -10,6 +15,7 @@ use anyhow::Result;

 // Re-export Unix functions that macOS uses
 pub use super::unix::{
+ensure_terminal_shown,
 exec_anki,
 get_anki_binary_path,
 initial_terminal_setup,
@@ -25,22 +31,13 @@ pub fn launch_anki_detached(anki_bin: &std::path::Path, _config: &crate::Config)
 .process_group(0)
 .ensure_spawn()?;
 std::mem::forget(child);

+println!("Anki will start shortly.");
+println!("\x1B[1mYou can close this window.\x1B[0m\n");
 Ok(())
 }

-pub fn handle_terminal_launch() -> Result<()> {
+pub fn relaunch_in_terminal() -> Result<()> {
-let stdout_is_terminal = std::io::IsTerminal::is_terminal(&std::io::stdout());
-if stdout_is_terminal {
-print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
-println!("\x1B[1mPreparing to start Anki...\x1B[0m\n");
-} else {
-// If launched from GUI, relaunch in Terminal.app
-relaunch_in_terminal()?;
-}
-Ok(())
-}

-fn relaunch_in_terminal() -> Result<()> {
 let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
 Command::new("open")
 .args(["-a", "Terminal"])
@@ -50,12 +47,37 @@ fn relaunch_in_terminal() -> Result<()> {
 }

 pub fn handle_first_launch(anki_bin: &std::path::Path) -> Result<()> {
+use std::io::Write;
+use std::io::{
+self,
+};
+
 // Pre-validate by running --version to trigger any Gatekeeper checks
-println!("\n\x1B[1mThis may take a few minutes. Please wait...\x1B[0m");
+print!("\n\x1B[1mThis may take a few minutes. Please wait\x1B[0m");
+io::stdout().flush().unwrap();
+
+// Start progress indicator
+let running = Arc::new(AtomicBool::new(true));
+let running_clone = running.clone();
+let progress_thread = thread::spawn(move || {
+while running_clone.load(Ordering::Relaxed) {
+print!(".");
+io::stdout().flush().unwrap();
+thread::sleep(Duration::from_secs(1));
+}
+});
+
 let _ = Command::new(anki_bin)
 .env("ANKI_FIRST_RUN", "1")
 .arg("--version")
+.stdout(std::process::Stdio::null())
+.stderr(std::process::Stdio::null())
 .ensure_success();

+// Stop progress indicator
+running.store(false, Ordering::Relaxed);
+progress_thread.join().unwrap();
+println!(); // New line after dots
 Ok(())
 }
@@ -3,6 +3,7 @@

 #![allow(dead_code)]

+use std::io::IsTerminal;
 use std::path::PathBuf;
 use std::process::Command;
@@ -16,11 +17,59 @@ pub fn initial_terminal_setup(_config: &mut Config) {
 // No special terminal setup needed on Unix
 }

-pub fn handle_terminal_launch() -> Result<()> {
+pub fn ensure_terminal_shown() -> Result<()> {
-print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top
+let stdout_is_terminal = IsTerminal::is_terminal(&std::io::stdout());
-println!("\x1B[1mPreparing to start Anki...\x1B[0m\n");
+if !stdout_is_terminal {
-// Skip terminal relaunch on non-macOS Unix systems as we don't know which
+// If launched from GUI, try to relaunch in a terminal
-// terminal is installed
+crate::platform::relaunch_in_terminal()?;
+}
+
+// Set terminal title to "Anki Launcher"
+print!("\x1b]2;Anki Launcher\x07");
+
+Ok(())
+}
+
+#[cfg(not(target_os = "macos"))]
+pub fn relaunch_in_terminal() -> Result<()> {
+let current_exe = std::env::current_exe().context("Failed to get current executable path")?;
+
+// Try terminals in order of preference
+let terminals = [
+("x-terminal-emulator", vec!["-e"]),
+("gnome-terminal", vec!["--"]),
+("konsole", vec!["-e"]),
+("xfce4-terminal", vec!["-e"]),
+("alacritty", vec!["-e"]),
+("kitty", vec![]),
+("foot", vec![]),
+("urxvt", vec!["-e"]),
+("xterm", vec!["-e"]),
+];
+
+for (terminal_cmd, args) in &terminals {
+// Check if terminal exists
+if Command::new("which")
+.arg(terminal_cmd)
+.output()
+.map(|o| o.status.success())
+.unwrap_or(false)
+{
+// Try to launch the terminal
+let mut cmd = Command::new(terminal_cmd);
+if args.is_empty() {
+cmd.arg(&current_exe);
+} else {
+cmd.args(args).arg(&current_exe);
+}
+
+if cmd.spawn().is_ok() {
+std::process::exit(0);
+}
+}
+}
+
+// If no terminal worked, continue without relaunching
 Ok(())
 }
@@ -7,27 +7,77 @@ use std::process::Command;
 use anki_process::CommandExt;
 use anyhow::Context;
 use anyhow::Result;
+use winapi::um::consoleapi;
+use winapi::um::errhandlingapi;
+use winapi::um::wincon;
+
 use crate::Config;

-pub fn handle_terminal_launch() -> Result<()> {
+pub fn ensure_terminal_shown() -> Result<()> {
-// uv will do this itself
+ensure_console();
+// // Check if we're already relaunched to prevent infinite recursion
+// if std::env::var("ANKI_LAUNCHER_IN_TERMINAL").is_ok() {
+// println!("Recurse: Preparing to start Anki...\n");
+// return Ok(());
+// }
+
+// if have_console {
+// } else {
+// relaunch_in_cmd()?;
+// }
 Ok(())
 }

+fn ensure_console() {
+unsafe {
+if !wincon::GetConsoleWindow().is_null() {
+return;
+}
+
+if consoleapi::AllocConsole() == 0 {
+let error_code = errhandlingapi::GetLastError();
+eprintln!("unexpected AllocConsole error: {}", error_code);
+return;
+}
+
+// This black magic triggers Windows to switch to the new
+// ANSI-supporting console host, which is usually only available
+// when the app is built with the console subsystem.
+let _ = Command::new("cmd").args(&["/C", ""]).status();
+}
+}
+
+fn attach_to_parent_console() -> bool {
+unsafe {
+if !wincon::GetConsoleWindow().is_null() {
+// we have a console already
+println!("attach: already had console, false");
+return false;
+}
+
+if wincon::AttachConsole(wincon::ATTACH_PARENT_PROCESS) != 0 {
+// successfully attached to parent
+println!("attach: true");
+true
+} else {
+println!("attach: false");
+false
+}
+}
+}
+
 /// If parent process has a console (eg cmd.exe), redirect our output there.
 /// Sets config.show_console to true if successfully attached to console.
 pub fn initial_terminal_setup(config: &mut Config) {
 use std::ffi::CString;

 use libc_stdhandle::*;
-use winapi::um::wincon;

-let console_attached = unsafe { wincon::AttachConsole(wincon::ATTACH_PARENT_PROCESS) };
+if !attach_to_parent_console() {
-if console_attached == 0 {
 return;
 }

+// we launched without a console, so we'll need to open stdin/out/err
 let conin = CString::new("CONIN$").unwrap();
 let conout = CString::new("CONOUT$").unwrap();
 let r = CString::new("r").unwrap();
@@ -113,6 +163,5 @@ pub fn get_exe_and_resources_dirs() -> Result<(PathBuf, PathBuf)> {
 }

 pub fn get_uv_binary_name() -> &'static str {
-// Windows uses standard uv binary name
 "uv.exe"
 }
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
test -f update.sh || {
|
test -f build.sh || {
|
||||||
echo "run from release folder"
|
echo "run from release folder"
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
@ -63,6 +63,9 @@ echo "Generated pyproject.toml with version $VERSION"
|
||||||
# Show diff if .old file exists
|
# Show diff if .old file exists
|
||||||
if [ -f pyproject.toml.old ]; then
|
if [ -f pyproject.toml.old ]; then
|
||||||
echo
|
echo
|
||||||
echo "Differences from previous version:"
|
echo "Differences from previous release version:"
|
||||||
diff -u --color=always pyproject.toml.old pyproject.toml || true
|
diff -u --color=always pyproject.toml.old pyproject.toml || true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
echo "Building wheel..."
|
||||||
|
"$UV" build --wheel --out-dir "$PROJ_ROOT/out/wheels"
|
||||||
|
|
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-# Get the project root (two levels up from qt/release)
-PROJ_ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
-
-# Use extracted uv binary
-UV="$PROJ_ROOT/out/extracted/uv/uv"
-
-rm -rf dist
-"$UV" build --wheel
-
-#UV_PUBLISH_TOKEN=$(pass show w/pypi-api-test) "$UV" publish --index testpypi
-UV_PUBLISH_TOKEN=$(pass show w/pypi-api) "$UV" publish
@@ -185,12 +185,16 @@ impl Card {
 self.usn = usn;
 }

-/// Caller must ensure provided deck exists and is not filtered.
+pub fn clear_fsrs_data(&mut self) {
-fn set_deck(&mut self, deck: DeckId) {
-self.remove_from_filtered_deck_restoring_queue();
 self.memory_state = None;
 self.desired_retention = None;
 self.decay = None;
+}
+
+/// Caller must ensure provided deck exists and is not filtered.
+fn set_deck(&mut self, deck: DeckId) {
+self.remove_from_filtered_deck_restoring_queue();
+self.clear_fsrs_data();
 self.deck_id = deck;
 }
@@ -14,14 +14,14 @@ use nom::combinator::recognize;
 use nom::combinator::rest;
 use nom::combinator::success;
 use nom::combinator::value;
-use nom::multi::fold_many0;
 use nom::multi::many0;
 use nom::sequence::delimited;
 use nom::sequence::pair;
 use nom::sequence::preceded;
 use nom::sequence::separated_pair;
 use nom::sequence::terminated;
-use nom::sequence::tuple;
+use nom::Input;
+use nom::Parser;

 use super::CardNodes;
 use super::Directive;
@@ -86,9 +86,12 @@ impl<'a> Directive<'a> {
 }

 /// Consume 0 or more of anything in " \t\r\n" after `parser`.
-fn trailing_whitespace0<'parser, 's, P, O>(parser: P) -> impl FnMut(&'s str) -> IResult<'s, O>
+fn trailing_whitespace0<I, O, E, P>(parser: P) -> impl Parser<I, Output = O, Error = E>
 where
-P: FnMut(&'s str) -> IResult<'s, O> + 'parser,
+I: Input,
+<I as Input>::Item: nom::AsChar,
+E: nom::error::ParseError<I>,
+P: Parser<I, Output = O, Error = E>,
 {
 terminated(parser, multispace0)
 }
@@ -97,11 +100,11 @@ where
 fn is_not0<'parser, 'arr: 'parser, 's: 'parser>(
 arr: &'arr str,
 ) -> impl FnMut(&'s str) -> IResult<'s, &'s str> + 'parser {
-alt((is_not(arr), success("")))
+move |s| alt((is_not(arr), success(""))).parse(s)
 }

 fn node(s: &str) -> IResult<Node> {
-alt((sound_node, tag_node, text_node))(s)
+alt((sound_node, tag_node, text_node)).parse(s)
 }

 /// A sound tag `[sound:resource]`, where `resource` is pointing to a sound or
@@ -110,11 +113,11 @@ fn sound_node(s: &str) -> IResult<Node> {
 map(
 delimited(tag("[sound:"), is_not("]"), tag("]")),
 Node::SoundOrVideo,
-)(s)
+)
+.parse(s)
 }

 fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
-use nom::InputTake;
 // first char could be '[', but wasn't part of a node, so skip (eof ends parse)
 let (after, offset) = anychar(s).map(|(s, c)| (s, c.len_utf8()))?;
 Ok(match after.find('[') {
@@ -127,7 +130,7 @@ fn take_till_potential_tag_start(s: &str) -> IResult<&str> {
 fn tag_node(s: &str) -> IResult<Node> {
 /// Match the start of an opening tag and return its name.
 fn name(s: &str) -> IResult<&str> {
-preceded(tag("[anki:"), is_not("] \t\r\n"))(s)
+preceded(tag("[anki:"), is_not("] \t\r\n")).parse(s)
 }

 /// Return a parser to match an opening `name` tag and return its options.
@@ -138,31 +141,35 @@ fn tag_node(s: &str) -> IResult<Node> {
 /// empty.
 fn options(s: &str) -> IResult<Vec<(&str, &str)>> {
 fn key(s: &str) -> IResult<&str> {
-is_not("] \t\r\n=")(s)
+is_not("] \t\r\n=").parse(s)
 }

 fn val(s: &str) -> IResult<&str> {
 alt((
 delimited(tag("\""), is_not0("\""), tag("\"")),
 is_not0("] \t\r\n\""),
-))(s)
+))
+.parse(s)
 }

-many0(trailing_whitespace0(separated_pair(key, tag("="), val)))(s)
+many0(trailing_whitespace0(separated_pair(key, tag("="), val))).parse(s)
 }

+move |s| {
 delimited(
 pair(tag("[anki:"), trailing_whitespace0(tag(name))),
 options,
 tag("]"),
 )
+.parse(s)
+}
 }

 /// Return a parser to match a closing `name` tag.
 fn closing_parser<'parser, 'name: 'parser, 's: 'parser>(
 name: &'name str,
 ) -> impl FnMut(&'s str) -> IResult<'s, ()> + 'parser {
-value((), tuple((tag("[/anki:"), tag(name), tag("]"))))
+move |s| value((), (tag("[/anki:"), tag(name), tag("]"))).parse(s)
 }

 /// Return a parser to match and return anything until a closing `name` tag
@@ -170,12 +177,13 @@ fn tag_node(s: &str) -> IResult<Node> {
 fn content_parser<'parser, 'name: 'parser, 's: 'parser>(
 name: &'name str,
 ) -> impl FnMut(&'s str) -> IResult<'s, &'s str> + 'parser {
-recognize(fold_many0(
+move |s| {
-pair(not(closing_parser(name)), take_till_potential_tag_start),
+recognize(many0(pair(
-// we don't need to accumulate anything
+not(closing_parser(name)),
-|| (),
+take_till_potential_tag_start,
-|_, _| (),
+)))
-))
+.parse(s)
+}
 }

 let (_, tag_name) = name(s)?;
@@ -185,11 +193,12 @@ fn tag_node(s: &str) -> IResult<Node> {
 closing_parser(tag_name),
 ),
 |(options, content)| Node::Directive(Directive::new(tag_name, options, content)),
-)(s)
+)
+.parse(s)
 }

 fn text_node(s: &str) -> IResult<Node> {
-map(take_till_potential_tag_start, Node::Text)(s)
+map(take_till_potential_tag_start, Node::Text).parse(s)
 }

 #[cfg(test)]
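Note: most of the churn in this file and the two below follows from nom 8's API change: combinator results now go through the `Parser` trait, so they are invoked with `.parse(input)` instead of being called as functions, `tuple((...))` gives way to plain tuples, and functions that keep a concrete `impl FnMut` return type wrap the combinator in a `move |s| ... .parse(s)` closure. A minimal sketch of the calling-convention change, using a hypothetical `digits` parser rather than anything from the patch:

```rust
use nom::bytes::complete::take_while1;
use nom::IResult;
use nom::Parser; // brings `.parse()` into scope

fn digits(s: &str) -> IResult<&str, &str> {
    // nom 7 style would be: take_while1(|c: char| c.is_ascii_digit())(s)
    take_while1(|c: char| c.is_ascii_digit()).parse(s)
}

fn main() {
    assert_eq!(digits("42abc"), Ok(("abc", "42")));
}
```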
@@ -15,6 +15,7 @@ use nom::bytes::complete::tag;
 use nom::bytes::complete::take_while;
 use nom::combinator::map;
 use nom::IResult;
+use nom::Parser;
 use regex::Captures;
 use regex::Regex;

@@ -72,7 +73,7 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
 }

 fn close_cloze(text: &str) -> IResult<&str, Token> {
-map(tag("}}"), |_| Token::CloseCloze)(text)
+map(tag("}}"), |_| Token::CloseCloze).parse(text)
 }

 /// Match a run of text until an open/close marker is encountered.
@@ -87,7 +88,7 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
 // start with the no-match case
 let mut index = text.len();
 for (idx, _) in text.char_indices() {
-if other_token(&text[idx..]).is_ok() {
+if other_token.parse(&text[idx..]).is_ok() {
 index = idx;
 break;
 }
@@ -99,8 +100,9 @@ fn tokenize(mut text: &str) -> impl Iterator<Item = Token> {
 if text.is_empty() {
 None
 } else {
-let (remaining_text, token) =
+let (remaining_text, token) = alt((open_cloze, close_cloze, normal_text))
-alt((open_cloze, close_cloze, normal_text))(text).unwrap();
+.parse(text)
+.unwrap();
 text = remaining_text;
 Some(token)
 }
@@ -13,6 +13,7 @@ use nom::character::complete::char;
 use nom::error::ErrorKind;
 use nom::sequence::preceded;
 use nom::sequence::separated_pair;
+use nom::Parser;

 fn unescape(text: &str) -> String {
 text.replace("\\:", ":")
@@ -22,11 +23,12 @@ pub fn parse_image_cloze(text: &str) -> Option<ImageOcclusionShape> {
 if let Some((shape, _)) = text.split_once(':') {
 let mut properties = vec![];
 let mut remaining = &text[shape.len()..];
-while let Ok((rem, (name, value))) = separated_pair::<_, _, _, _, (_, ErrorKind), _, _, _>(
+while let Ok((rem, (name, value))) = separated_pair::<_, _, _, (_, ErrorKind), _, _, _>(
 preceded(tag(":"), is_not("=")),
 tag("="),
 escaped(is_not("\\:"), '\\', char(':')),
-)(remaining)
+)
+.parse(remaining)
 {
 remaining = rem;
 let value = unescape(value);
@@ -105,10 +105,14 @@ impl Collection {
 progress.update(true, |state| state.current_cards = idx as u32 + 1)?;
 let mut card = self.storage.get_card(card_id)?.or_not_found(card_id)?;
 let original = card.clone();
-if let (Some(req), Some(item)) = (&req, item) {
+if let Some(req) = &req {
-card.set_memory_state(&fsrs, Some(item), historical_retention.unwrap())?;
+// Store decay and desired retention in the card so that add-ons, card info,
+// stats and browser search/sorts don't need to access the deck config.
+// Unlike memory states, scheduler doesn't use decay and dr stored in the card.
 card.desired_retention = desired_retention;
 card.decay = decay;
+if let Some(item) = item {
+card.set_memory_state(&fsrs, Some(item), historical_retention.unwrap())?;
 // if rescheduling
 if let Some(reviews) = &last_revlog_info {
 // and we have a last review time for the card
@@ -128,7 +132,7 @@ impl Collection {
 let original_interval = card.interval;
 let interval = fsrs.next_interval(
 Some(state.stability),
-card.desired_retention.unwrap(),
+desired_retention.unwrap(),
 0,
 );
 card.interval = rescheduler
@@ -156,7 +160,8 @@ impl Collection {
 } else {
 &mut card.due
 };
-let new_due = (timing.days_elapsed as i32) - days_elapsed
+let new_due = (timing.days_elapsed as i32)
+- days_elapsed
 + card.interval as i32;
 if let Some(rescheduler) = &mut rescheduler {
 rescheduler.update_due_cnt_per_day(
@@ -167,15 +172,23 @@ impl Collection {
 }
 *due = new_due;
 // Add a rescheduled revlog entry
-self.log_rescheduled_review(&card, original_interval, usn)?;
+self.log_rescheduled_review(
+&card,
+original_interval,
+usn,
+)?;
 }
 }
 }
 }
 }
 } else {
+// clear memory states if item is None
 card.memory_state = None;
-card.desired_retention = None;
+}
+} else {
+// clear FSRS data if FSRS is disabled
+card.clear_fsrs_data();
 }
 self.update_card_inner(&mut card, original, usn)?;
 }
@@ -213,8 +226,6 @@ impl Collection {
 decay,
 })
 } else {
-card.memory_state = None;
-card.desired_retention = None;
 Ok(ComputeMemoryStateResponse {
 state: None,
 desired_retention,
@@ -17,6 +17,7 @@ use crate::collection::Collection;
 use crate::config::StringKey;
 use crate::error::Result;
 use crate::prelude::*;
+use crate::scheduler::timing::is_unix_epoch_timestamp;
 
 impl Card {
 /// Make card due in `days_from_today`.

@@ -27,6 +28,7 @@ impl Card {
 fn set_due_date(
 &mut self,
 today: u32,
+next_day_start: i64,
 days_from_today: u32,
 ease_factor: f32,
 force_reset: bool,

@@ -34,8 +36,15 @@ impl Card {
 let new_due = (today + days_from_today) as i32;
 let fsrs_enabled = self.memory_state.is_some();
 let new_interval = if fsrs_enabled {
-self.interval
-.saturating_add_signed(new_due - self.original_or_current_due())
+let due = self.original_or_current_due();
+let due_diff = if is_unix_epoch_timestamp(due) {
+let offset = (due as i64 - next_day_start) / 86_400;
+let due = (today as i64 + offset) as i32;
+new_due - due
+} else {
+new_due - due
+};
+self.interval.saturating_add_signed(due_diff)
 } else if force_reset || !matches!(self.ctype, CardType::Review | CardType::Relearn) {
 days_from_today.max(1)
 } else {

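The hunk above converts a (re)learning card's epoch-seconds due stamp into a day number before differencing it against the new due day, so the interval is shifted by whole days in either representation. A minimal standalone sketch of that arithmetic (the names `SECS_PER_DAY`, `due_shift` and `due_is_epoch` are illustrative, not taken from the codebase):

// Sketch only; not part of the diff above.
const SECS_PER_DAY: i64 = 86_400;

/// Days to add to the interval when a card due on `due` is moved to `new_due`.
fn due_shift(due: i32, new_due: i32, today: u32, next_day_start: i64, due_is_epoch: bool) -> i32 {
    if due_is_epoch {
        // (Re)learning cards store `due` as epoch seconds; turn that into a
        // day number relative to today before taking the difference.
        let offset = (due as i64 - next_day_start) / SECS_PER_DAY;
        let due_day = (today as i64 + offset) as i32;
        new_due - due_day
    } else {
        // Review cards already store `due` as a day number.
        new_due - due
    }
}

fn main() {
    // A learning card due exactly at the next day rollover, pushed to
    // 3 days from today, shifts the interval by 3 days.
    let (today, next_day_start) = (100_u32, 1_700_000_000_i64);
    assert_eq!(due_shift(next_day_start as i32, 103, today, next_day_start, true), 3);
}
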
@@ -114,6 +123,7 @@ impl Collection {
 let spec = parse_due_date_str(days)?;
 let usn = self.usn()?;
 let today = self.timing_today()?.days_elapsed;
+let next_day_start = self.timing_today()?.next_day_at.0;
 let mut rng = rand::rng();
 let distribution = Uniform::new_inclusive(spec.min, spec.max).unwrap();
 let mut decks_initial_ease: HashMap<DeckId, f32> = HashMap::new();

@@ -137,7 +147,13 @@ impl Collection {
 };
 let original = card.clone();
 let days_from_today = distribution.sample(&mut rng);
-card.set_due_date(today, days_from_today, ease_factor, spec.force_reset);
+card.set_due_date(
+today,
+next_day_start,
+days_from_today,
+ease_factor,
+spec.force_reset,
+);
 col.log_manually_scheduled_review(&card, original.interval, usn)?;
 col.update_card_inner(&mut card, original, usn)?;
 }

@@ -228,26 +244,26 @@ mod test {
 let mut c = Card::new(NoteId(0), 0, DeckId(0), 0);
 
 // setting the due date of a new card will convert it
-c.set_due_date(5, 2, 1.8, false);
+c.set_due_date(5, 0, 2, 1.8, false);
 assert_eq!(c.ctype, CardType::Review);
 assert_eq!(c.due, 7);
 assert_eq!(c.interval, 2);
 assert_eq!(c.ease_factor, 1800);
 
 // reschedule it again the next day, shifting it from day 7 to day 9
-c.set_due_date(6, 3, 2.5, false);
+c.set_due_date(6, 0, 3, 2.5, false);
 assert_eq!(c.due, 9);
 assert_eq!(c.interval, 2);
 assert_eq!(c.ease_factor, 1800); // interval doesn't change
 
 // we can bring cards forward too - return it to its original due date
-c.set_due_date(6, 1, 2.4, false);
+c.set_due_date(6, 0, 1, 2.4, false);
 assert_eq!(c.due, 7);
 assert_eq!(c.interval, 2);
 assert_eq!(c.ease_factor, 1800); // interval doesn't change
 
 // we can force the interval to be reset instead of shifted
-c.set_due_date(6, 3, 2.3, true);
+c.set_due_date(6, 0, 3, 2.3, true);
 assert_eq!(c.due, 9);
 assert_eq!(c.interval, 3);
 assert_eq!(c.ease_factor, 1800); // interval doesn't change

@@ -259,7 +275,7 @@ mod test {
 c.original_deck_id = DeckId(1);
 c.due = -10000;
 c.queue = CardQueue::New;
-c.set_due_date(6, 1, 2.2, false);
+c.set_due_date(6, 0, 1, 2.2, false);
 assert_eq!(c.due, 7);
 assert_eq!(c.interval, 2);
 assert_eq!(c.ease_factor, 2200);

@@ -271,7 +287,7 @@ mod test {
 c.ctype = CardType::Relearn;
 c.original_due = c.due;
 c.due = 12345678;
-c.set_due_date(6, 10, 2.1, false);
+c.set_due_date(6, 0, 10, 2.1, false);
 assert_eq!(c.due, 16);
 assert_eq!(c.interval, 2);
 assert_eq!(c.ease_factor, 2200); // interval doesn't change

@@ -19,6 +19,7 @@ use nom::error::ErrorKind as NomErrorKind;
 use nom::multi::many0;
 use nom::sequence::preceded;
 use nom::sequence::separated_pair;
+use nom::Parser;
 use regex::Captures;
 use regex::Regex;
 

@@ -202,18 +203,19 @@ fn group_inner(input: &str) -> IResult<Vec<Node>> {
 }
 
 fn whitespace0(s: &str) -> IResult<Vec<char>> {
-many0(one_of(" \u{3000}"))(s)
+many0(one_of(" \u{3000}")).parse(s)
 }
 
 /// Optional leading space, then a (negated) group or text
 fn node(s: &str) -> IResult<Node> {
-preceded(whitespace0, alt((negated_node, group, text)))(s)
+preceded(whitespace0, alt((negated_node, group, text))).parse(s)
 }
 
 fn negated_node(s: &str) -> IResult<Node> {
 map(preceded(char('-'), alt((group, text))), |node| {
 Node::Not(Box::new(node))
-})(s)
+})
+.parse(s)
 }
 
 /// One or more nodes surrounded by brackets, eg (one OR two)

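These parser hunks are the mechanical part of the nom 7 → 8 upgrade declared in the workspace Cargo.toml: nom 8 combinators return values implementing the `Parser` trait, so a composed parser is driven with `.parse(input)` (which needs `use nom::Parser;` in scope) instead of being called directly as a function. A minimal sketch of the two calling styles, assuming a crate that depends only on nom 8:

// Sketch only, assuming `nom = "8"` as the sole dependency.
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::IResult;
use nom::Parser; // brings `.parse()` into scope, as in the hunks above

fn yes_or_no(s: &str) -> IResult<&str, &str> {
    // nom 7 style: alt((tag("yes"), tag("no")))(s)
    // nom 8 style: the combinator result is a `Parser`, so call `.parse(s)`
    alt((tag("yes"), tag("no"))).parse(s)
}

fn main() {
    assert_eq!(yes_or_no("yes please"), Ok((" please", "yes")));
    assert!(yes_or_no("maybe").is_err());
}

Where the old code pinned the error type with a turbofish on `alt`, the annotation now moves onto the `map_err` closure instead, as in the `parse_prop` hunks below.
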
@@ -233,7 +235,7 @@ fn group(s: &str) -> IResult<Node> {
 
 /// Either quoted or unquoted text
 fn text(s: &str) -> IResult<Node> {
-alt((quoted_term, partially_quoted_term, unquoted_term))(s)
+alt((quoted_term, partially_quoted_term, unquoted_term)).parse(s)
 }
 
 /// Quoted text, including the outer double quotes.

@@ -248,7 +250,8 @@ fn partially_quoted_term(s: &str) -> IResult<Node> {
 escaped(is_not("\"(): \u{3000}\\"), '\\', none_of(" \u{3000}")),
 char(':'),
 quoted_term_str,
-)(s)?;
+)
+.parse(s)?;
 Ok((
 remaining,
 Node::Search(search_node_for_text_with_argument(key, val)?),

@@ -296,7 +299,7 @@ fn unquoted_term(s: &str) -> IResult<Node> {
 fn quoted_term_str(s: &str) -> IResult<&str> {
 let (opened, _) = char('"')(s)?;
 if let Ok((tail, inner)) =
-escaped::<_, ParseError, _, _, _, _>(is_not(r#""\"#), '\\', anychar)(opened)
+escaped::<_, ParseError, _, _>(is_not(r#""\"#), '\\', anychar).parse(opened)
 {
 if let Ok((remaining, _)) = char::<_, ParseError>('"')(tail) {
 Ok((remaining, inner))

@@ -321,7 +324,8 @@ fn search_node_for_text(s: &str) -> ParseResult<SearchNode> {
 // leading : is only possible error for well-formed input
 let (tail, head) = verify(escaped(is_not(r":\"), '\\', anychar), |t: &str| {
 !t.is_empty()
-})(s)
+})
+.parse(s)
 .map_err(|_: nom::Err<ParseError>| parse_failure(s, FailKind::MissingKey))?;
 if tail.is_empty() {
 Ok(SearchNode::UnqualifiedText(unescape(head)?))

@@ -407,7 +411,7 @@ fn parse_resched(s: &str) -> ParseResult<SearchNode> {
 
 /// eg prop:ivl>3, prop:ease!=2.5
 fn parse_prop(prop_clause: &str) -> ParseResult<SearchNode> {
-let (tail, prop) = alt::<_, _, ParseError, _>((
+let (tail, prop) = alt((
 tag("ivl"),
 tag("due"),
 tag("reps"),

@@ -421,8 +425,9 @@ fn parse_prop(prop_clause: &str) -> ParseResult<SearchNode> {
 tag("r"),
 recognize(preceded(tag("cdn:"), alphanumeric1)),
 recognize(preceded(tag("cds:"), alphanumeric1)),
-))(prop_clause)
-.map_err(|_| {
+))
+.parse(prop_clause)
+.map_err(|_: nom::Err<ParseError>| {
 parse_failure(
 prop_clause,
 FailKind::InvalidPropProperty {

@@ -431,15 +436,16 @@ fn parse_prop(prop_clause: &str) -> ParseResult<SearchNode> {
 )
 })?;
 
-let (num, operator) = alt::<_, _, ParseError, _>((
+let (num, operator) = alt((
 tag("<="),
 tag(">="),
 tag("!="),
 tag("="),
 tag("<"),
 tag(">"),
-))(tail)
-.map_err(|_| {
+))
+.parse(tail)
+.map_err(|_: nom::Err<ParseError>| {
 parse_failure(
 prop_clause,
 FailKind::InvalidPropOperator {

@@ -13,6 +13,7 @@ use nom::bytes::complete::tag;
 use nom::bytes::complete::take_until;
 use nom::combinator::map;
 use nom::sequence::delimited;
+use nom::Parser;
 use regex::Regex;
 
 use crate::cloze::cloze_number_in_fields;

@@ -67,7 +68,8 @@ impl TemplateMode {
 tag(self.end_tag()),
 ),
 |out| classify_handle(out),
-)(s)
+)
+.parse(s)
 }
 
 /// Return the next handlebar, comment or text token.

@@ -127,7 +129,8 @@ fn comment_token(s: &str) -> nom::IResult<&str, Token> {
 tag(COMMENT_END),
 ),
 Token::Comment,
-)(s)
+)
+.parse(s)
 }
 
 fn tokens(mut template: &str) -> impl Iterator<Item = TemplateResult<Token<'_>>> {

@@ -13,4 +13,4 @@ path = "main.rs"
 name = "anki-sync-server"
 
 [dependencies]
-anki.workspace = true
+anki = { workspace = true, features = ["rustls"] }

@@ -1,6 +1,8 @@
 #!/bin/bash
-set -e
+
+set -eo pipefail
 
 rm -rf out/wheels/*
 RELEASE=2 ./ninja wheels
+(cd qt/release && ./build.sh)
 echo "wheels are in out/wheels"

@@ -510,7 +510,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 border: 1px solid var(--border);
 border-radius: var(--border-radius);
 padding: 6px;
-margin: 1px;
+margin: 1px 3px 3px 1px;
 
 &:focus-within {
 outline-offset: -1px;

@@ -166,7 +166,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 }
 
 function onKeydown(event: KeyboardEvent): void {
-switch (event.code) {
+switch (event.key) {
 case "Enter":
 onEnter(event);
 break;

@@ -90,7 +90,7 @@ export function keyToPlatformString(key: string): string {
 }
 
 export function isArrowLeft(event: KeyboardEvent): boolean {
-if (event.code === "ArrowLeft") {
+if (event.key === "ArrowLeft") {
 return true;
 }
 

@@ -98,7 +98,7 @@ export function isArrowLeft(event: KeyboardEvent): boolean {
 }
 
 export function isArrowRight(event: KeyboardEvent): boolean {
-if (event.code === "ArrowRight") {
+if (event.key === "ArrowRight") {
 return true;
 }
 

@@ -106,7 +106,7 @@ export function isArrowRight(event: KeyboardEvent): boolean {
 }
 
 export function isArrowUp(event: KeyboardEvent): boolean {
-if (event.code === "ArrowUp") {
+if (event.key === "ArrowUp") {
 return true;
 }
 

@@ -114,7 +114,7 @@ export function isArrowUp(event: KeyboardEvent): boolean {
 }
 
 export function isArrowDown(event: KeyboardEvent): boolean {
-if (event.code === "ArrowDown") {
+if (event.key === "ArrowDown") {
 return true;
 }
 

@@ -391,7 +391,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 </button>
 </div>
 
-<div class="m-2">
+<hr />
+
+<div class="m-1">
 <button class="btn btn-primary" on:click={() => (showSimulator = true)}>
 {tr.deckConfigFsrsSimulatorExperimental()}
 </button>

@@ -419,4 +421,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 align-content: center;
 flex-wrap: wrap;
 }
+
+hr {
+border-top: 1px solid var(--border);
+opacity: 1;
+}
 </style>

@@ -141,7 +141,7 @@ export function renderReviews(
 
 const yTickFormat = (n: number): string => {
 if (showTime) {
-return timeSpan(n / 1000, true, false, TimespanUnit.Hours);
+return timeSpan(n / 1000, true, true, TimespanUnit.Hours);
 } else {
 if (Math.round(n) != n) {
 return "";