Mirror of https://github.com/ankitects/anki.git (synced 2025-09-19 14:32:22 -04:00)
change sync label to indicate sync state
- blue for normal sync, red for full sync required
- refactor status fetching code so we don't hold a collection lock during the network request, which slows things down
- fix sync spinner restarting when returning to deck list
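For orientation, a minimal sketch (not part of the commit; the helper name is made up for illustration) of how the new SyncStatusOut.required value maps onto the CSS classes the hunks below introduce:

# Sketch only: SyncStatusOut.Required values (see backend.proto below) mapped to
# the CSS class that toolbar.scss colours blue (new-count) or red (learn-count).
NO_CHANGES, NORMAL_SYNC, FULL_SYNC = 0, 1, 2


def sync_label_class(required: int) -> str:
    # an empty string leaves the sync label uncoloured
    return {NORMAL_SYNC: "normal-sync", FULL_SYNC: "full-sync"}.get(required, "")


assert sync_label_class(FULL_SYNC) == "full-sync"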
This commit is contained in:
parent ac219ae728
commit 4d7e23111e
14 changed files with 261 additions and 71 deletions
@@ -164,7 +164,7 @@ service BackendService {
     rpc AbortMediaSync (Empty) returns (Empty);
     rpc BeforeUpload (Empty) returns (Empty);
     rpc SyncLogin (SyncLoginIn) returns (SyncAuth);
-    rpc SyncStatus (SyncAuth) returns (SyncCollectionOut);
+    rpc SyncStatus (SyncAuth) returns (SyncStatusOut);
     rpc SyncCollection (SyncAuth) returns (SyncCollectionOut);
     rpc FullUpload (SyncAuth) returns (Empty);
     rpc FullDownload (SyncAuth) returns (Empty);
@@ -904,6 +904,15 @@ message SyncLoginIn {
     string password = 2;
 }
 
+message SyncStatusOut {
+    enum Required {
+        NO_CHANGES = 0;
+        NORMAL_SYNC = 1;
+        FULL_SYNC = 2;
+    }
+    Required required = 1;
+}
+
 message SyncCollectionOut {
     enum ChangesRequired {
         NO_CHANGES = 0;
@@ -54,6 +54,7 @@ DeckTreeNode = pb.DeckTreeNode
 StockNoteType = pb.StockNoteType
 SyncAuth = pb.SyncAuth
 SyncOutput = pb.SyncCollectionOut
+SyncStatus = pb.SyncStatusOut
 
 try:
     import orjson
@@ -107,7 +107,10 @@ def render_method(method, idx):
     input_args = f"self, input: {fullname(method.input_type.full_name)}"
     input_assign_outer = ""
     name = fix_snakecase(stringcase.snakecase(method.name))
-    if len(method.output_type.fields) == 1:
+    if (
+        len(method.output_type.fields) == 1
+        and method.output_type.fields[0].type != TYPE_ENUM
+    ):
         # unwrap single return arg
         f = method.output_type.fields[0]
         single_field = f".{f.name}"
qt/.gitignore (vendored): 1 line changed
@@ -19,6 +19,7 @@ aqt_data/web/overview.js
 aqt_data/web/reviewer-bottom.js
 aqt_data/web/reviewer.js
 aqt_data/web/webview.js
+aqt_data/web/toolbar.js
 dist
 aqt.egg-info
 build
@@ -15,6 +15,7 @@ from anki.utils import ids2str
 from aqt import AnkiQt, gui_hooks
 from aqt.qt import *
 from aqt.sound import av_player
+from aqt.sync import get_sync_status
 from aqt.toolbar import BottomBar
 from aqt.utils import askUser, getOnlyText, openLink, shortcut, showWarning, tr
 
@@ -57,7 +58,7 @@ class DeckBrowser:
         self.web.set_bridge_command(self._linkHandler, self)
         self._renderPage()
         # redraw top bar for theme change
-        self.mw.toolbar.draw()
+        self.mw.toolbar.redraw()
 
     def refresh(self):
         self._renderPage()
@@ -78,6 +78,7 @@ class ProfileManager:
     def __init__(self, base=None):
         self.name = None
         self.db = None
+        self.profile: Optional[Dict] = None
         # instantiate base folder
         self._setBaseFolder(base)
 
@@ -16,6 +16,7 @@ from anki.rsbackend import (
     SyncError,
     SyncErrorKind,
     SyncOutput,
+    SyncStatus,
 )
 from aqt.qt import (
     QDialog,
@@ -37,13 +38,19 @@ class FullSyncChoice(enum.Enum):
     DOWNLOAD = 2
 
 
-def get_sync_status(mw: aqt.main.AnkiQt, callback: Callable[[SyncOutput], None]):
+def get_sync_status(mw: aqt.main.AnkiQt, callback: Callable[[SyncStatus], None]):
     auth = mw.pm.sync_auth()
     if not auth:
-        return
+        return SyncStatus(required=SyncStatus.NO_CHANGES)
 
     def on_future_done(fut):
-        callback(fut.result())
+        try:
+            out = fut.result()
+        except Exception as e:
+            # swallow errors
+            print("sync status check failed:", str(e))
+            return
+        callback(out)
 
     mw.taskman.run_in_background(
         lambda: mw.col.backend.sync_status(auth), on_future_done
@@ -8,8 +8,10 @@ from typing import Any, Dict, Optional
 
 import aqt
 from anki.lang import _
+from anki.rsbackend import SyncStatus
 from aqt import gui_hooks
 from aqt.qt import *
+from aqt.sync import get_sync_status
 from aqt.webview import AnkiWebView
 
 
@@ -45,11 +47,16 @@ class Toolbar:
         link_handler = link_handler or self._linkHandler
         self.web.set_bridge_command(link_handler, web_context)
         self.web.stdHtml(
-            self._body % self._centerLinks(), css=["toolbar.css"], context=web_context,
+            self._body % self._centerLinks(),
+            css=["toolbar.css"],
+            js=["webview.js", "jquery.js", "toolbar.js"],
+            context=web_context,
         )
         self.web.adjustHeightToFit()
-        if self.mw.media_syncer.is_syncing():
-            self.set_sync_active(True)
+
+    def redraw(self) -> None:
+        self.set_sync_active(self.mw.media_syncer.is_syncing())
+        self.update_sync_status()
 
     # Available links
     ######################################################################
@@ -128,6 +135,9 @@ class Toolbar:
 
         return "\n".join(links)
 
+    # Sync
+    ######################################################################
+
     def _create_sync_link(self) -> str:
         name = _("Sync")
         title = _("Shortcut key: %s") % "Y"
@@ -146,6 +156,12 @@ class Toolbar:
             meth = "removeClass"
         self.web.eval(f"$('#sync-spinner').{meth}('spin')")
 
+    def set_sync_status(self, status: SyncStatus) -> None:
+        self.web.eval(f"updateSyncColor({status.required})")
+
+    def update_sync_status(self) -> None:
+        get_sync_status(self.mw, self.mw.toolbar.set_sync_status)
+
     # Link handling
     ######################################################################
 
@@ -45,6 +45,14 @@ body {
   #header {
     border-bottom-color: vars.$night-faint-border;
   }
+
+  .normal-sync {
+    color: vars.$night-new-count;
+  }
+
+  .full-sync {
+    color: vars.$night-learn-count;
+  }
 }
 
 .isMac.nightMode #header {
@@ -70,4 +78,14 @@ body {
   height: 16px;
   margin-bottom: -3px;
   visibility: hidden;
 }
+
+.normal-sync {
+  color: vars.$day-new-count;
+}
+
+.full-sync {
+  color: vars.$day-learn-count;
+}
+
qt/ts/src/toolbar.ts (new file): 22 lines
@@ -0,0 +1,22 @@
+enum SyncState {
+    NoChanges = 0,
+    Normal,
+    Full,
+}
+
+function updateSyncColor(state: SyncState) {
+    const elem = document.getElementById("sync");
+    switch (state) {
+        case SyncState.NoChanges:
+            elem.classList.remove("full-sync", "normal-sync");
+            break;
+        case SyncState.Normal:
+            elem.classList.add("normal-sync");
+            elem.classList.remove("full-sync");
+            break;
+        case SyncState.Full:
+            elem.classList.add("full-sync");
+            elem.classList.remove("normal-sync");
+            break;
+    }
+}
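The Python side calls this helper from Toolbar.set_sync_status via web.eval(f"updateSyncColor({status.required})") (see the toolbar hunk above), so the SyncState values here intentionally mirror SyncStatusOut.Required in backend.proto: 0 for no changes, 1 for a normal sync, 2 for a full sync.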
@@ -33,8 +33,8 @@ use crate::{
     sched::timespan::{answer_button_time, learning_congrats, studied_today, time_span},
     search::SortMode,
     sync::{
-        sync_abort, sync_login, FullSyncProgress, NormalSyncProgress, SyncActionRequired, SyncAuth,
-        SyncOutput, SyncStage,
+        get_remote_sync_meta, sync_abort, sync_login, FullSyncProgress, NormalSyncProgress,
+        SyncActionRequired, SyncAuth, SyncMeta, SyncOutput, SyncStage,
     },
     template::RenderedNode,
     text::{extract_av_tags, strip_av_tags, AVTag},
@@ -44,7 +44,7 @@ use crate::{
 use fluent::FluentValue;
 use futures::future::{AbortHandle, Abortable};
 use log::error;
-use pb::BackendService;
+use pb::{sync_status_out, BackendService};
 use prost::Message;
 use serde_json::Value as JsonValue;
 use std::collections::{HashMap, HashSet};
@@ -92,6 +92,27 @@ pub struct Backend {
     media_sync_abort: Option<AbortHandle>,
     progress_state: Arc<Mutex<ProgressState>>,
     runtime: Option<Runtime>,
+    state: Arc<Mutex<BackendState>>,
+}
+
+// fixme: move other items like runtime into here as well
+
+#[derive(Default)]
+struct BackendState {
+    remote_sync_status: RemoteSyncStatus,
+}
+
+#[derive(Default, Debug)]
+pub(crate) struct RemoteSyncStatus {
+    last_check: TimestampSecs,
+    last_response: sync_status_out::Required,
+}
+
+impl RemoteSyncStatus {
+    fn update(&mut self, required: sync_status_out::Required) {
+        self.last_check = TimestampSecs::now();
+        self.last_response = required
+    }
 }
 
 #[derive(Clone, Copy)]
@@ -971,12 +992,12 @@ impl BackendService for Backend {
         self.sync_login_inner(input)
     }
 
-    fn sync_status(&mut self, input: pb::SyncAuth) -> BackendResult<pb::SyncCollectionOut> {
-        self.sync_collection_inner(input, true)
+    fn sync_status(&mut self, input: pb::SyncAuth) -> BackendResult<pb::SyncStatusOut> {
+        self.sync_status_inner(input)
     }
 
     fn sync_collection(&mut self, input: pb::SyncAuth) -> BackendResult<pb::SyncCollectionOut> {
-        self.sync_collection_inner(input, false)
+        self.sync_collection_inner(input)
     }
 
     fn full_upload(&mut self, input: pb::SyncAuth) -> BackendResult<Empty> {
@@ -1157,6 +1178,7 @@ impl Backend {
                 last_progress: None,
             })),
             runtime: None,
+            state: Arc::new(Mutex::new(BackendState::default())),
         }
     }
 
@@ -1265,10 +1287,38 @@ impl Backend {
         })
     }
 
+    fn sync_status_inner(&mut self, input: pb::SyncAuth) -> BackendResult<pb::SyncStatusOut> {
+        // any local changes mean we can skip the network round-trip
+        let req = self.with_col(|col| col.get_local_sync_status())?;
+        if req != pb::sync_status_out::Required::NoChanges {
+            return Ok(req.into());
+        }
+
+        // return cached server response if only a short time has elapsed
+        {
+            let guard = self.state.lock().unwrap();
+            if guard.remote_sync_status.last_check.elapsed_secs() < 300 {
+                return Ok(guard.remote_sync_status.last_response.into());
+            }
+        }
+
+        // fetch and cache result
+        let rt = self.runtime_handle();
+        let remote: SyncMeta = rt.block_on(get_remote_sync_meta(input.into()))?;
+        let response = self.with_col(|col| col.get_sync_status(remote).map(Into::into))?;
+
+        {
+            let mut guard = self.state.lock().unwrap();
+            guard.remote_sync_status.last_check = TimestampSecs::now();
+            guard.remote_sync_status.last_response = response;
+        }
+
+        Ok(response.into())
+    }
+
     fn sync_collection_inner(
         &mut self,
         input: pb::SyncAuth,
-        check_only: bool,
     ) -> BackendResult<pb::SyncCollectionOut> {
         let (abort_handle, abort_reg) = AbortHandle::new_pair();
         self.sync_abort = Some(abort_handle);
@@ -1277,39 +1327,36 @@ impl Backend {
         let input_copy = input.clone();
 
         let ret = self.with_col(|col| {
-            let result = if check_only {
-                let sync_fut = col.get_sync_status(input.into());
-                let abortable_sync = Abortable::new(sync_fut, abort_reg);
-                rt.block_on(abortable_sync)
-            } else {
-                let mut handler = self.new_progress_handler();
-                let progress_fn = move |progress: NormalSyncProgress, throttle: bool| {
-                    handler.update(progress, throttle);
-                };
-
-                let sync_fut = col.normal_sync(input.into(), progress_fn);
-                let abortable_sync = Abortable::new(sync_fut, abort_reg);
-                rt.block_on(abortable_sync)
+            let mut handler = self.new_progress_handler();
+            let progress_fn = move |progress: NormalSyncProgress, throttle: bool| {
+                handler.update(progress, throttle);
             };
-            match result {
+
+            let sync_fut = col.normal_sync(input.into(), progress_fn);
+            let abortable_sync = Abortable::new(sync_fut, abort_reg);
+
+            match rt.block_on(abortable_sync) {
                 Ok(sync_result) => sync_result,
                 Err(_) => {
                     // if the user aborted, we'll need to clean up the transaction
-                    if !check_only {
-                        col.storage.rollback_trx()?;
-                        // and tell AnkiWeb to clean up
-                        let _handle = std::thread::spawn(move || {
-                            let _ =
-                                rt.block_on(sync_abort(input_copy.hkey, input_copy.host_number));
-                        });
-                    }
+                    col.storage.rollback_trx()?;
+                    // and tell AnkiWeb to clean up
+                    let _handle = std::thread::spawn(move || {
+                        let _ = rt.block_on(sync_abort(input_copy.hkey, input_copy.host_number));
+                    });
 
                     Err(AnkiError::Interrupted)
                 }
             }
         });
         self.sync_abort = None;
 
         let output: SyncOutput = ret?;
+        self.state
+            .lock()
+            .unwrap()
+            .remote_sync_status
+            .update(output.required.into());
         Ok(output.into())
     }
 
@@ -1361,7 +1408,16 @@ impl Backend {
         )?);
 
         match result {
-            Ok(sync_result) => sync_result,
+            Ok(sync_result) => {
+                if sync_result.is_ok() {
+                    self.state
+                        .lock()
+                        .unwrap()
+                        .remote_sync_status
+                        .update(sync_status_out::Required::NoChanges);
+                }
+                sync_result
+            }
             Err(_) => Err(AnkiError::Interrupted),
         }
     }
@@ -307,6 +307,15 @@ impl SqliteStorage {
             .map_err(Into::into)
     }
 
+    pub(crate) fn get_last_sync(&self) -> Result<TimestampMillis> {
+        self.db
+            .prepare_cached("select ls from col")?
+            .query_and_then(NO_PARAMS, |r| r.get(0))?
+            .next()
+            .ok_or_else(|| AnkiError::invalid_input("missing col"))?
+            .map_err(Into::into)
+    }
+
     pub(crate) fn set_last_sync(&self, stamp: TimestampMillis) -> Result<()> {
         self.db
             .prepare("update col set ls = ?")?
@@ -4,6 +4,7 @@
 mod http_client;
 
 use crate::{
+    backend_proto::{sync_status_out, SyncStatusOut},
     card::{Card, CardQueue, CardType},
     deckconf::DeckConfSchema11,
     decks::DeckSchema11,
@@ -211,7 +212,7 @@ pub struct FullSyncProgress {
     pub total_bytes: usize,
 }
 
-#[derive(PartialEq, Debug)]
+#[derive(PartialEq, Debug, Clone, Copy)]
 pub enum SyncActionRequired {
     NoChanges,
     FullSyncRequired { upload_ok: bool, download_ok: bool },
@@ -262,6 +263,35 @@ impl Usn {
     }
 }
 
+impl SyncMeta {
+    fn compared_to_remote(&self, remote: SyncMeta) -> SyncState {
+        let local = self;
+        let required = if remote.modified == local.modified {
+            SyncActionRequired::NoChanges
+        } else if remote.schema != local.schema {
+            let upload_ok = !local.empty || remote.empty;
+            let download_ok = !remote.empty || local.empty;
+            SyncActionRequired::FullSyncRequired {
+                upload_ok,
+                download_ok,
+            }
+        } else {
+            SyncActionRequired::NormalSyncRequired
+        };
+
+        SyncState {
+            required,
+            local_is_newer: local.modified > remote.modified,
+            usn_at_last_sync: local.usn,
+            latest_usn: remote.usn,
+            pending_usn: Usn(-1),
+            new_usn: Some(remote.usn),
+            server_message: remote.server_message,
+            host_number: remote.host_number,
+        }
+    }
+}
+
 impl<F> NormalSyncer<'_, F>
 where
     F: FnMut(NormalSyncProgress, bool),
@@ -338,29 +368,7 @@ where
             });
         }
 
-        let required = if remote.modified == local.modified {
-            SyncActionRequired::NoChanges
-        } else if remote.schema != local.schema {
-            let upload_ok = !local.empty || remote.empty;
-            let download_ok = !remote.empty || local.empty;
-            SyncActionRequired::FullSyncRequired {
-                upload_ok,
-                download_ok,
-            }
-        } else {
-            SyncActionRequired::NormalSyncRequired
-        };
-
-        Ok(SyncState {
-            required,
-            local_is_newer: local.modified > remote.modified,
-            usn_at_last_sync: local.usn,
-            latest_usn: remote.usn,
-            pending_usn: Usn(-1),
-            new_usn: Some(remote.usn),
-            server_message: remote.server_message,
-            host_number: remote.host_number,
-        })
+        Ok(local.compared_to_remote(remote))
     }
 
 /// Sync. Caller must have created a transaction, and should call
@@ -593,12 +601,29 @@ pub async fn sync_abort(hkey: String, host_number: u32) -> Result<()> {
     remote.abort().await
 }
 
+pub(crate) async fn get_remote_sync_meta(auth: SyncAuth) -> Result<SyncMeta> {
+    let remote = HTTPSyncClient::new(Some(auth.hkey), auth.host_number);
+    remote.meta().await
+}
+
 impl Collection {
-    pub async fn get_sync_status(&mut self, auth: SyncAuth) -> Result<SyncOutput> {
-        NormalSyncer::new(self, auth, |_p, _t| ())
-            .get_sync_state()
-            .await
-            .map(Into::into)
+    pub fn get_local_sync_status(&mut self) -> Result<sync_status_out::Required> {
+        let last_sync = self.storage.get_last_sync()?;
+        let schema_mod = self.storage.get_schema_mtime()?;
+        let normal_mod = self.storage.get_modified_time()?;
+        let required = if schema_mod > last_sync {
+            sync_status_out::Required::FullSync
+        } else if normal_mod > last_sync {
+            sync_status_out::Required::NormalSync
+        } else {
+            sync_status_out::Required::NoChanges
+        };
+
+        Ok(required)
+    }
+
+    pub fn get_sync_status(&self, remote: SyncMeta) -> Result<sync_status_out::Required> {
+        Ok(self.sync_meta()?.compared_to_remote(remote).required.into())
     }
 
     pub async fn normal_sync<F>(&mut self, auth: SyncAuth, progress_fn: F) -> Result<SyncOutput>
@@ -1130,6 +1155,22 @@ impl From<SyncState> for SyncOutput {
     }
 }
 
+impl From<sync_status_out::Required> for SyncStatusOut {
+    fn from(r: sync_status_out::Required) -> Self {
+        SyncStatusOut { required: r.into() }
+    }
+}
+
+impl From<SyncActionRequired> for sync_status_out::Required {
+    fn from(r: SyncActionRequired) -> Self {
+        match r {
+            SyncActionRequired::NoChanges => sync_status_out::Required::NoChanges,
+            SyncActionRequired::FullSyncRequired { .. } => sync_status_out::Required::FullSync,
+            SyncActionRequired::NormalSyncRequired => sync_status_out::Required::NormalSync,
+        }
+    }
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
@@ -1246,8 +1287,9 @@ mod test {
         // col1.storage.set_creation_stamp(TimestampSecs(12345))?;
 
         // and sync our changes
-        let out: SyncOutput = col1.get_sync_status(ctx.auth.clone()).await?;
-        assert_eq!(out.required, SyncActionRequired::NormalSyncRequired);
+        let remote = get_remote_sync_meta(ctx.auth.clone()).await?;
+        let out = col1.get_sync_status(remote)?;
+        assert_eq!(out, sync_status_out::Required::NormalSync);
 
         let out: SyncOutput = col1.normal_sync(ctx.auth.clone(), norm_progress).await?;
         assert_eq!(out.required, SyncActionRequired::NoChanges);
@@ -13,6 +13,10 @@ impl TimestampSecs {
     pub fn now() -> Self {
         Self(elapsed().as_secs() as i64)
     }
+
+    pub fn elapsed_secs(self) -> u64 {
+        (Self::now().0 - self.0).max(0) as u64
+    }
 }
 
 impl TimestampMillis {