Mirror of https://github.com/ankitects/anki.git, synced 2026-01-14 22:39:06 -05:00
* Add crate csv
* Add start of csv importing on backend
* Add Mnemosyne serializer
* Add csv and json importing on backend
* Add plaintext importing on frontend
* Add csv metadata extraction on backend
* Add csv importing with GUI
* Fix missing dfa file in build
Added compile_data_attr, then re-ran cargo/update.py.
* Don't use doubly buffered reader in csv
* Escape HTML entities if CSV is not HTML
Also use name 'is_html' consistently.
* Use decimal number as foreign ease (like '2.5')
* ForeignCard.ivl → ForeignCard.interval
* Only allow fixed set of CSV delimiters
* Map timestamp of ForeignCard to native due time
* Don't trim CSV records
* Document use of empty strings for defaults
* Avoid creating CardGenContexts for every note
This requires CardGenContext to be generic, so it works both with an
owned and a borrowed notetype (see the sketch after this list).
* Show all accepted file types in import file picker
* Add import_json_file()
* factor → ease_factor
* delimter_from_value → delimiter_from_value
* Map columns to fields, not the other way around
* Fall back to current config for csv metadata
* Add start of new csv import screen
* Temporary fix for compilation issue on Linux/Mac
* Disable jest bazel action for import-csv
Jest fails with an error code if no tests are available, but this would
not be noticeable on Windows, as Jest is not run there.
* Fix field mapping issue
* Revert "Temporary fix for compilation issue on Linux/Mac"
This reverts commit 21f8a26140.
* Add HtmlSwitch and move Switch to components
* Fix spacing and make selectors consistent
* Fix shortcut tooltip
* Place import button at the top with path
* Fix meta column indices
* Remove NotetypeForString
* Fix queue and type of foreign cards
* Support different dupe resolution strategies
* Allow dupe resolution selection when importing CSV
* Test import of unnormalized text
Close #1863.
* Fix logging of foreign notes
* Implement CSV exports
* Use db_scalar() in notes_table_len()
* Rework CSV metadata
- Notetypes and decks are either defined by a global id or by a column.
- If a notetype id is provided, its field map must also be specified.
- If a notetype column is provided, fields are now mapped by index
instead of name at import time. So the first non-meta column is used for
the first field of every note, regardless of notetype. This makes
importing easier and should improve compatibility with files without a
notetype column.
- Ensure first field can be mapped to a column.
- Meta columns must be defined as `#[meta name]:[column index]` instead
of in the `#columns` tag (see the example after this list).
- Column labels contain the raw names defined by the file and must be
prettified by the frontend.
* Adjust frontend to new backend column mapping
* Add force flags for is_html and delimiter
* Detect if CSV is HTML by field content
* Update dupe resolution labels
* Simplify selectors
* Fix coalescence of oneofs in TS
* Disable meta columns from selection
Plus a lot of refactoring.
* Make import button stick to the bottom
* Write delimiter and html flag into csv
* Refetch field map after notetype change
* Fix log labels for csv import
* Log notes whose deck/notetype was missing
* Fix hiding of empty log queues
* Implement adding tags to all notes of a csv
* Fix dupe resolution not being set in log
* Implement adding tags to updated notes of a csv
* Check first note field is not empty
* Temporary fix for build on Linux/Mac
* Fix inverted html check (dae)
* Remove unused ftl string
* Delimiter → Separator
* Remove commented-out line
* Don't accept .json files
* Tweak tag ftl strings
* Remove redundant blur call
* Strip sound and add spaces in csv export
* Export HTML by default
* Fix unset deck in Mnemosyne import
Also accept both numbers and strings for notetypes and decks in JSON.
* Make DupeResolution::Update the default
* Fix missing dot in extension
* Make column indices 1-based
* Remove StickContainer from TagEditor
Fixes line breaking, border and z-index on ImportCsvPage.
* Assign different key combos to tag editors
* Log all updated duplicates
Add a log field for the true number of found notes.
* Show identical notes as skipped
* Split tag-editor into separate ts module (dae)
* Add progress for CSV export
* Add progress for text import
* Tidy-ups after tag-editor split (dae)
- import-csv no longer depends on editor
- remove some commented lines
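
Regarding the CardGenContext change above: a minimal sketch, not Anki's actual
code, of how a context can be made generic over an owned or a borrowed value so
a single instance can be reused across notes. The type and field names below
are stand-ins.

use std::borrow::Borrow;

// Stand-in for the real notetype struct.
struct Notetype {
    name: String,
}

// Generic over anything that can be borrowed as a Notetype,
// so it accepts both `Notetype` and `&Notetype`.
struct CardGenContext<N: Borrow<Notetype>> {
    notetype: N,
}

impl<N: Borrow<Notetype>> CardGenContext<N> {
    fn notetype_name(&self) -> &str {
        &self.notetype.borrow().name
    }
}

fn main() {
    let basic = Notetype { name: "Basic".into() };
    // A context over a borrowed notetype can be reused for many notes...
    let borrowed = CardGenContext { notetype: &basic };
    // ...while a context can also own its notetype outright.
    let owned = CardGenContext {
        notetype: Notetype { name: "Cloze".into() },
    };
    println!("{} / {}", borrowed.notetype_name(), owned.notetype_name());
}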
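
To make the reworked CSV metadata concrete, here is a purely illustrative file
header in the spirit of the scheme described above; the exact tag names and
syntax are assumptions, not taken from this changelog:

#separator:;
#html:false
#deck:1
#notetype:2
My Deck;Basic;front text;back text

Here columns 1 and 2 are meta columns for the deck and notetype, so the first
non-meta column (column 3) maps to the first field of every note.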
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::sync::{Arc, Mutex};

use futures::future::AbortHandle;

use super::Backend;
use crate::{
    backend_proto as pb,
    dbcheck::DatabaseCheckProgress,
    i18n::I18n,
    import_export::{ExportProgress, ImportProgress},
    media::sync::MediaSyncProgress,
    sync::{FullSyncProgress, NormalSyncProgress, SyncStage},
};

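/// Holds a reference to the shared [ProgressState] and writes progress
/// updates into it, skipping updates that arrive less than 0.1s after the
/// previous one when throttling is requested.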
pub(super) struct ThrottlingProgressHandler {
    pub state: Arc<Mutex<ProgressState>>,
    pub last_update: coarsetime::Instant,
}

impl ThrottlingProgressHandler {
    /// Returns true if should continue.
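    /// If `throttle` is set, updates within 0.1s of the previous one are
    /// skipped; otherwise the value is stored for the frontend to poll, and
    /// any pending abort request is consumed.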
    pub(super) fn update(&mut self, progress: impl Into<Progress>, throttle: bool) -> bool {
        let now = coarsetime::Instant::now();
        if throttle && now.duration_since(self.last_update).as_f64() < 0.1 {
            return true;
        }
        self.last_update = now;
        let mut guard = self.state.lock().unwrap();
        guard.last_progress.replace(progress.into());
        let want_abort = guard.want_abort;
        guard.want_abort = false;
        !want_abort
    }
}

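/// State shared between the backend and the progress handler: the most
/// recently reported progress value, and a flag that can be set to request
/// that the running operation abort.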
pub(super) struct ProgressState {
    pub want_abort: bool,
    pub last_progress: Option<Progress>,
}

// fixme: this should support multiple abort handles.
pub(super) type AbortHandleSlot = Arc<Mutex<Option<AbortHandle>>>;

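/// The kinds of long-running backend operations that report progress.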
#[derive(Clone, Copy)]
pub(super) enum Progress {
    MediaSync(MediaSyncProgress),
    MediaCheck(u32),
    FullSync(FullSyncProgress),
    NormalSync(NormalSyncProgress),
    DatabaseCheck(DatabaseCheckProgress),
    Import(ImportProgress),
    Export(ExportProgress),
}

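/// Converts the last-seen progress value (if any) into the protobuf message
/// polled by the frontend, using `tr` to build translated label strings.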
pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Progress {
    let progress = if let Some(progress) = progress {
        match progress {
            Progress::MediaSync(p) => pb::progress::Value::MediaSync(media_sync_progress(p, tr)),
            Progress::MediaCheck(n) => {
                pb::progress::Value::MediaCheck(tr.media_check_checked(n).into())
            }
            Progress::FullSync(p) => pb::progress::Value::FullSync(pb::progress::FullSync {
                transferred: p.transferred_bytes as u32,
                total: p.total_bytes as u32,
            }),
            Progress::NormalSync(p) => {
                let stage = match p.stage {
                    SyncStage::Connecting => tr.sync_syncing(),
                    SyncStage::Syncing => tr.sync_syncing(),
                    SyncStage::Finalizing => tr.sync_checking(),
                }
                .to_string();
                let added = tr
                    .sync_added_updated_count(p.local_update, p.remote_update)
                    .into();
                let removed = tr
                    .sync_media_removed_count(p.local_remove, p.remote_remove)
                    .into();
                pb::progress::Value::NormalSync(pb::progress::NormalSync {
                    stage,
                    added,
                    removed,
                })
            }
            Progress::DatabaseCheck(p) => {
                let mut stage_total = 0;
                let mut stage_current = 0;
                let stage = match p {
                    DatabaseCheckProgress::Integrity => tr.database_check_checking_integrity(),
                    DatabaseCheckProgress::Optimize => tr.database_check_rebuilding(),
                    DatabaseCheckProgress::Cards => tr.database_check_checking_cards(),
                    DatabaseCheckProgress::Notes { current, total } => {
                        stage_total = total;
                        stage_current = current;
                        tr.database_check_checking_notes()
                    }
                    DatabaseCheckProgress::History => tr.database_check_checking_history(),
                }
                .to_string();
                pb::progress::Value::DatabaseCheck(pb::progress::DatabaseCheck {
                    stage,
                    stage_total,
                    stage_current,
                })
            }
            Progress::Import(progress) => pb::progress::Value::Importing(
                match progress {
                    ImportProgress::File => tr.importing_importing_file(),
                    ImportProgress::Media(n) => tr.importing_processed_media_file(n),
                    ImportProgress::MediaCheck(n) => tr.media_check_checked(n),
                    ImportProgress::Notes(n) => tr.importing_processed_notes(n),
                    ImportProgress::Extracting => tr.importing_extracting(),
                    ImportProgress::Gathering => tr.importing_gathering(),
                }
                .into(),
            ),
            Progress::Export(progress) => pb::progress::Value::Exporting(
                match progress {
                    ExportProgress::File => tr.exporting_exporting_file(),
                    ExportProgress::Media(n) => tr.exporting_processed_media_files(n),
                    ExportProgress::Notes(n) => tr.importing_processed_notes(n),
                    ExportProgress::Cards(n) => tr.importing_processed_cards(n),
                    ExportProgress::Gathering => tr.importing_gathering(),
                }
                .into(),
            ),
        }
    } else {
        pb::progress::Value::None(pb::Empty {})
    };
    pb::Progress {
        value: Some(progress),
    }
}

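/// Builds the translated strings describing a media sync's progress.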
fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::progress::MediaSync {
    pb::progress::MediaSync {
        checked: tr.sync_media_checked_count(p.checked).into(),
        added: tr
            .sync_media_added_count(p.uploaded_files, p.downloaded_files)
            .into(),
        removed: tr
            .sync_media_removed_count(p.uploaded_deletions, p.downloaded_deletions)
            .into(),
    }
}

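// The From impls below let callers pass the concrete progress types directly
// to ThrottlingProgressHandler::update(), which takes `impl Into<Progress>`.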
impl From<FullSyncProgress> for Progress {
    fn from(p: FullSyncProgress) -> Self {
        Progress::FullSync(p)
    }
}

impl From<MediaSyncProgress> for Progress {
    fn from(p: MediaSyncProgress) -> Self {
        Progress::MediaSync(p)
    }
}

impl From<NormalSyncProgress> for Progress {
    fn from(p: NormalSyncProgress) -> Self {
        Progress::NormalSync(p)
    }
}

impl Backend {
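    /// Clears any previous progress and abort state, and returns a handler
    /// that writes into the backend's shared progress state.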
    pub(super) fn new_progress_handler(&self) -> ThrottlingProgressHandler {
        {
            let mut guard = self.progress_state.lock().unwrap();
            guard.want_abort = false;
            guard.last_progress = None;
        }
        ThrottlingProgressHandler {
            state: Arc::clone(&self.progress_state),
            last_update: coarsetime::Instant::now(),
        }
    }
}