Mirror of https://github.com/ankitects/anki.git (synced 2025-09-18 14:02:21 -04:00)
Change Anki's version scheme; bump to 23.09 (#2640)
* Accept iterables as inputs to backend methods

* Shift add-on check to backend; use new endpoint

  The new endpoint will return info on a suitable branch if found, instead of
  returning all branches. This simplifies the frontend code, and means that you
  can now drop support for certain versions without it also remotely disabling
  the add-on for people who are running one of the excluded versions, like in
  https://forums.ankiweb.net/t/prevent-add-ons-from-being-disabled-remote-stealthily-surreptitiously/33427

* Bump version to 23.09

  This changes Anki's version numbering system to year.month.patch, as
  previously mentioned on
  https://forums.ankiweb.net/t/use-a-different-versioning-system-semver-perhaps/20046/5

  This is shaping up to be a big release, with the introduction of FSRS and
  image occlusion, and it seems like a good time to be finally updating the
  version scheme as well. AnkiWeb has been updated to understand the new
  format, and add-on authors will now specify version compatibility using the
  full version number, as can be seen here:
  https://ankiweb.net/shared/info/3918629684

* Shift update check to backend, and tidy up update.py

* Use the shared client for sync connections too
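The new scheme packs year, month and patch into a single integer of the form YYMMPP, which is what the int_version() helper added in this commit computes (see the anki/utils.py hunk below). A minimal standalone sketch of the same conversion, for illustration only (version_to_int is a hypothetical name, not part of the codebase):

def version_to_int(version: str) -> int:
    # "23.09" -> 230900, "23.09.1" -> 230901 (YYMMPP)
    parts = [int(p) for p in version.split(".")]
    year, month = parts[0], parts[1]
    patch = parts[2] if len(parts) > 2 else 0
    return year * 10_000 + month * 100 + patch

assert version_to_int("23.09") == 230900
assert version_to_int("23.09.1") == 230901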
Parent: 6d34d19808
Commit: ffd392de21
23 changed files with 336 additions and 277 deletions
.version (2 changes)

@@ -1 +1 @@
-2.1.66
+23.09
proto/anki/ankiweb.proto (new file, 49 lines)

@@ -0,0 +1,49 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+syntax = "proto3";
+
+option java_multiple_files = true;
+
+package anki.ankiweb;
+
+service AnkiwebService {}
+
+service BackendAnkiwebService {
+  // Fetch info on add-ons from AnkiWeb. A maximum of 25 can be queried at one
+  // time. If an add-on doesn't have a branch compatible with the provided
+  // version, that add-on will not be included in the returned list.
+  rpc GetAddonInfo(GetAddonInfoRequest) returns (GetAddonInfoResponse);
+  rpc CheckForUpdate(CheckForUpdateRequest) returns (CheckForUpdateResponse);
+}
+
+message GetAddonInfoRequest {
+  uint32 client_version = 1;
+  repeated uint32 addon_ids = 2;
+}
+
+message GetAddonInfoResponse {
+  repeated AddonInfo info = 1;
+}
+
+message AddonInfo {
+  uint32 id = 1;
+  int64 modified = 2;
+  uint32 min_version = 3;
+  uint32 max_version = 4;
+}
+
+message CheckForUpdateRequest {
+  uint32 version = 1;
+  string buildhash = 2;
+  string os = 3;
+  int64 install_id = 4;
+  uint32 last_message_id = 5;
+}
+
+message CheckForUpdateResponse {
+  optional string new_version = 1;
+  int64 current_time = 2;
+  optional string message = 3;
+  uint32 last_message_id = 4;
+}
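The two RPCs above are called from the Qt layer through the generated backend bindings; the qt/aqt/addons.py changes later in this diff use them roughly as in the following sketch (fetch_addon_info is an illustrative name, and a running Anki instance with aqt.mw available is assumed):

import aqt
from anki.utils import int_version

def fetch_addon_info(addon_ids: list[int]):
    # AnkiWeb omits add-ons that have no branch compatible with
    # client_version, and accepts at most 25 ids per request.
    return aqt.mw.backend.get_addon_info(
        client_version=int_version(), addon_ids=addon_ids[:25]
    )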
@@ -6,6 +6,7 @@ from __future__ import annotations
 from typing import Any, Generator, Literal, Sequence, Union, cast
 
 from anki import (
+    ankiweb_pb2,
     card_rendering_pb2,
     collection_pb2,
     config_pb2,
@@ -45,6 +46,8 @@ TtsVoice = card_rendering_pb2.AllTtsVoicesResponse.TtsVoice
 GetImageForOcclusionResponse = image_occlusion_pb2.GetImageForOcclusionResponse
 AddImageOcclusionNoteRequest = image_occlusion_pb2.AddImageOcclusionNoteRequest
 GetImageOcclusionNoteResponse = image_occlusion_pb2.GetImageOcclusionNoteResponse
+AddonInfo = ankiweb_pb2.AddonInfo
+CheckForUpdateResponse = ankiweb_pb2.CheckForUpdateResponse
 
 import copy
 import os
@@ -304,14 +304,29 @@ def version_with_build() -> str:
     return f"{version} ({buildhash})"
 
 
-def point_version() -> int:
+def int_version() -> int:
+    """Anki's version as an integer in the form YYMMPP, e.g. 230900.
+
+    (year, month, patch).
+
+    In 2.1.x releases, this was just the last number."""
     from anki.buildinfo import version
 
-    return int(version.rsplit(".", maxsplit=1)[-1])
+    try:
+        [year, month, patch] = version.split(".")
+    except ValueError:
+        [year, month] = version.split(".")
+        patch = "0"
+
+    year_num = int(year)
+    month_num = int(month)
+    patch_num = int(patch)
+
+    return year_num * 10_000 + month_num * 100 + patch_num
 
 
-# keep the legacy alias around without a deprecation warning for now
-pointVersion = point_version
+# these two legacy aliases are provided without deprecation warnings, as add-ons that want to support
+# old versions could not use the new name without catching cases where it doesn't exist
+point_version = int_version
+pointVersion = int_version
 
 _deprecated_names = DeprecatedNamesMixinForModule(globals())
 _deprecated_names.register_deprecated_aliases(
qt/aqt/addons.py (195 changes)
@@ -14,7 +14,7 @@ from concurrent.futures import Future
 from dataclasses import dataclass
 from datetime import datetime
 from pathlib import Path
-from typing import IO, Any, Callable, Iterable, Union
+from typing import IO, Any, Callable, Iterable, Sequence, Union
 from urllib.parse import parse_qs, urlparse
 from zipfile import ZipFile
 
@@ -28,6 +28,7 @@ import anki.utils
 import aqt
 import aqt.forms
 import aqt.main
+from anki.collection import AddonInfo
 from anki.httpclient import HttpClient
 from anki.lang import without_unicode_isolation
 from aqt import gui_hooks
@@ -92,18 +93,9 @@ class DownloadError:
 DownloadLogEntry = tuple[int, Union[DownloadError, InstallError, InstallOk]]
 
 
-@dataclass
-class UpdateInfo:
-    id: int
-    suitable_branch_last_modified: int
-    current_branch_last_modified: int
-    current_branch_min_point_ver: int
-    current_branch_max_point_ver: int
-
-
 ANKIWEB_ID_RE = re.compile(r"^\d+$")
 
-current_point_version = anki.utils.point_version()
+_current_version = anki.utils.int_version()
 
 
 @dataclass
@@ -113,8 +105,8 @@ class AddonMeta:
     enabled: bool
     installed_at: int
     conflicts: list[str]
-    min_point_version: int
-    max_point_version: int
+    min_version: int
+    max_version: int
     branch_index: int
     human_version: str | None
     update_enabled: bool
@@ -131,11 +123,11 @@ class AddonMeta:
         return None
 
     def compatible(self) -> bool:
-        min = self.min_point_version
-        if min is not None and current_point_version < min:
+        min = self.min_version
+        if min is not None and _current_version < min:
             return False
-        max = self.max_point_version
-        if max is not None and max < 0 and current_point_version > abs(max):
+        max = self.max_version
+        if max is not None and max < 0 and _current_version > abs(max):
             return False
         return True
 
@@ -155,8 +147,8 @@ class AddonMeta:
             enabled=not json_meta.get("disabled"),
             installed_at=json_meta.get("mod", 0),
             conflicts=json_meta.get("conflicts", []),
-            min_point_version=json_meta.get("min_point_version", 0) or 0,
-            max_point_version=json_meta.get("max_point_version", 0) or 0,
+            min_version=json_meta.get("min_point_version", 0) or 0,
+            max_version=json_meta.get("max_point_version", 0) or 0,
             branch_index=json_meta.get("branch_index", 0) or 0,
             human_version=json_meta.get("human_version"),
             update_enabled=json_meta.get("update_enabled", True),
@@ -191,9 +183,10 @@ class AddonManager:
         "mod": {"type": "number", "meta": True},
         # a list of other packages that conflict
         "conflicts": {"type": "array", "items": {"type": "string"}, "meta": True},
-        # the minimum 2.1.x version this add-on supports
+        # x for anki 2.1.x; int_version() for more recent releases
         "min_point_version": {"type": "number", "meta": True},
-        # if negative, abs(n) is the maximum 2.1.x version this add-on supports
+        # x for anki 2.1.x; int_version() for more recent releases
+        # if negative, abs(n) is the maximum version this add-on supports
         # if positive, indicates version tested on, and is ignored
         "max_point_version": {"type": "number", "meta": True},
         # AnkiWeb sends this to indicate which branch the user downloaded.
@@ -280,8 +273,8 @@ class AddonManager:
         json_obj["disabled"] = not addon.enabled
         json_obj["mod"] = addon.installed_at
         json_obj["conflicts"] = addon.conflicts
-        json_obj["max_point_version"] = addon.max_point_version
-        json_obj["min_point_version"] = addon.min_point_version
+        json_obj["max_point_version"] = addon.max_version
+        json_obj["min_point_version"] = addon.min_version
         json_obj["branch_index"] = addon.branch_index
         if addon.human_version is not None:
             json_obj["human_version"] = addon.human_version
@@ -552,60 +545,39 @@ class AddonManager:
     # Updating
     ######################################################################
 
-    def extract_update_info(self, items: list[dict]) -> list[UpdateInfo]:
-        def extract_one(item: dict) -> UpdateInfo:
-            id = item["id"]
-            meta = self.addon_meta(str(id))
-            branch_idx = meta.branch_index
-            return extract_update_info(current_point_version, branch_idx, item)
-
-        return list(map(extract_one, items))
-
-    def update_supported_versions(self, items: list[UpdateInfo]) -> None:
-        for item in items:
-            self.update_supported_version(item)
-
-    def update_supported_version(self, item: UpdateInfo) -> None:
-        addon = self.addon_meta(str(item.id))
-        updated = False
-        is_latest = addon.is_latest(item.current_branch_last_modified)
-
-        # if max different to the stored value
-        cur_max = item.current_branch_max_point_ver
-        if addon.max_point_version != cur_max:
-            if is_latest:
-                addon.max_point_version = cur_max
-                updated = True
-            else:
-                # user is not up to date; only update if new version is stricter
-                if cur_max is not None and cur_max < addon.max_point_version:
-                    addon.max_point_version = cur_max
-                    updated = True
-
-        # if min different to the stored value
-        cur_min = item.current_branch_min_point_ver
-        if addon.min_point_version != cur_min:
-            if is_latest:
-                addon.min_point_version = cur_min
-                updated = True
-            else:
-                # user is not up to date; only update if new version is stricter
-                if cur_min is not None and cur_min > addon.min_point_version:
-                    addon.min_point_version = cur_min
-                    updated = True
-
-        if updated:
-            self.write_addon_meta(addon)
-
-    def updates_required(self, items: list[UpdateInfo]) -> list[UpdateInfo]:
+    def update_supported_versions(self, items: list[AddonInfo]) -> None:
+        """Adjust the supported version range after an update check.
+
+        AnkiWeb will not have sent us any add-ons that don't support our
+        version, so this cannot disable add-ons that users are using. It
+        does allow the add-on author to mark an add-on as not supporting
+        a future release, causing the add-on to be disabled when the user
+        upgrades.
+        """
+
+        for item in items:
+            addon = self.addon_meta(str(item.id))
+            updated = False
+
+            if addon.max_version != item.max_version:
+                addon.max_version = item.max_version
+                updated = True
+            if addon.min_version != item.min_version:
+                addon.min_version = item.min_version
+                updated = True
+
+            if updated:
+                self.write_addon_meta(addon)
+
+    def get_updated_addons(self, items: list[AddonInfo]) -> list[AddonInfo]:
         """Return ids of add-ons requiring an update."""
         need_update = []
         for item in items:
             addon = self.addon_meta(str(item.id))
             # update if server mtime is newer
-            if not addon.is_latest(item.suitable_branch_last_modified):
+            if not addon.is_latest(item.modified):
                 need_update.append(item)
-            elif not addon.compatible() and item.suitable_branch_last_modified > 0:
+            elif not addon.compatible():
                 # Addon is currently disabled, and a suitable branch was found on the
                 # server. Ignore our stored mtime (which may have been set incorrectly
                 # in the past) and require an update.
@@ -808,11 +780,11 @@ class AddonsDialog(QDialog):
         return name
 
     def compatible_string(self, addon: AddonMeta) -> str:
-        min = addon.min_point_version
-        if min is not None and min > current_point_version:
+        min = addon.min_version
+        if min is not None and min > _current_version:
             return f"Anki >= 2.1.{min}"
         else:
-            max = abs(addon.max_point_version)
+            max = abs(addon.max_version)
             return f"Anki <= 2.1.{max}"
 
     def should_grey(self, addon: AddonMeta) -> bool:
@@ -1020,9 +992,7 @@ class GetAddons(QDialog):
 def download_addon(client: HttpClient, id: int) -> DownloadOk | DownloadError:
     "Fetch a single add-on from AnkiWeb."
     try:
-        resp = client.get(
-            f"{aqt.appShared}download/{id}?v=2.1&p={current_point_version}"
-        )
+        resp = client.get(f"{aqt.appShared}download/{id}?v=2.1&p={_current_version}")
         if resp.status_code != 200:
             return DownloadError(status_code=resp.status_code)
 
@@ -1230,13 +1200,11 @@ class ChooseAddonsToUpdateList(QListWidget):
         self,
         parent: QWidget,
         mgr: AddonManager,
-        updated_addons: list[UpdateInfo],
+        updated_addons: list[AddonInfo],
     ) -> None:
         QListWidget.__init__(self, parent)
         self.mgr = mgr
-        self.updated_addons = sorted(
-            updated_addons, key=lambda addon: addon.suitable_branch_last_modified
-        )
+        self.updated_addons = sorted(updated_addons, key=lambda addon: addon.modified)
         self.ignore_check_evt = False
         self.setup()
         self.setContextMenuPolicy(Qt.ContextMenuPolicy.CustomContextMenu)
@@ -1256,7 +1224,7 @@ class ChooseAddonsToUpdateList(QListWidget):
         addon_meta = self.mgr.addon_meta(str(addon_id))
         update_enabled = addon_meta.update_enabled
         addon_name = addon_meta.human_name()
-        update_timestamp = update_info.suitable_branch_last_modified
+        update_timestamp = update_info.modified
         update_time = datetime.fromtimestamp(update_timestamp)
 
         addon_label = f"{update_time:%Y-%m-%d} {addon_name}"
@@ -1344,7 +1312,7 @@ class ChooseAddonsToUpdateList(QListWidget):
 
 class ChooseAddonsToUpdateDialog(QDialog):
     def __init__(
-        self, parent: QWidget, mgr: AddonManager, updated_addons: list[UpdateInfo]
+        self, parent: QWidget, mgr: AddonManager, updated_addons: list[AddonInfo]
     ) -> None:
         QDialog.__init__(self, parent)
         self.setWindowTitle(tr.addons_choose_update_window_title())
@@ -1386,32 +1354,25 @@ class ChooseAddonsToUpdateDialog(QDialog):
         return []
 
 
-def fetch_update_info(client: HttpClient, ids: list[int]) -> list[dict]:
+def fetch_update_info(ids: list[int]) -> list[AddonInfo]:
     """Fetch update info from AnkiWeb in one or more batches."""
-    all_info: list[dict] = []
+    all_info: list[AddonInfo] = []
 
     while ids:
         # get another chunk
         chunk = ids[:25]
         del ids[:25]
 
-        batch_results = _fetch_update_info_batch(client, map(str, chunk))
+        batch_results = _fetch_update_info_batch(chunk)
         all_info.extend(batch_results)
 
     return all_info
 
 
-def _fetch_update_info_batch(
-    client: HttpClient, chunk: Iterable[str]
-) -> Iterable[dict]:
-    """Get update info from AnkiWeb.
-
-    Chunk must not contain more than 25 ids."""
-    resp = client.get(f"{aqt.appShared}updates/{','.join(chunk)}?v=3")
-    if resp.status_code == 200:
-        return resp.json()
-    else:
-        raise Exception(f"Unexpected response code from AnkiWeb: {resp.status_code}")
+def _fetch_update_info_batch(chunk: Iterable[int]) -> Sequence[AddonInfo]:
+    return aqt.mw.backend.get_addon_info(
+        client_version=_current_version, addon_ids=chunk
+    )
 
 
 def check_and_prompt_for_updates(
@@ -1420,19 +1381,17 @@ def check_and_prompt_for_updates(
     on_done: Callable[[list[DownloadLogEntry]], None],
     requested_by_user: bool = True,
 ) -> None:
-    def on_updates_received(client: HttpClient, items: list[dict]) -> None:
-        handle_update_info(parent, mgr, client, items, on_done, requested_by_user)
+    def on_updates_received(items: list[AddonInfo]) -> None:
+        handle_update_info(parent, mgr, items, on_done, requested_by_user)
 
     check_for_updates(mgr, on_updates_received)
 
 
 def check_for_updates(
-    mgr: AddonManager, on_done: Callable[[HttpClient, list[dict]], None]
+    mgr: AddonManager, on_done: Callable[[list[AddonInfo]], None]
 ) -> None:
-    client = HttpClient()
-
-    def check() -> list[dict]:
-        return fetch_update_info(client, mgr.ankiweb_addons())
+    def check() -> list[AddonInfo]:
+        return fetch_update_info(mgr.ankiweb_addons())
 
     def update_info_received(future: Future) -> None:
         # if syncing/in profile screen, defer message delivery
@@ -1451,66 +1410,36 @@ def check_for_updates(
         else:
             result = future.result()
 
-            on_done(client, result)
+            on_done(result)
 
     mgr.mw.taskman.run_in_background(check, update_info_received)
 
 
-def extract_update_info(
-    current_point_version: int, current_branch_idx: int, info_json: dict
-) -> UpdateInfo:
-    "Process branches to determine the updated mod time and min/max versions."
-    branches = info_json["branches"]
-    try:
-        current = branches[current_branch_idx]
-    except IndexError:
-        current = branches[0]
-
-    last_mod = 0
-    for branch in branches:
-        if branch["minpt"] > current_point_version:
-            continue
-        if branch["maxpt"] < 0 and abs(branch["maxpt"]) < current_point_version:
-            continue
-        last_mod = branch["fmod"]
-
-    return UpdateInfo(
-        id=info_json["id"],
-        suitable_branch_last_modified=last_mod,
-        current_branch_last_modified=current["fmod"],
-        current_branch_min_point_ver=current["minpt"],
-        current_branch_max_point_ver=current["maxpt"],
-    )
-
-
 def handle_update_info(
     parent: QWidget,
     mgr: AddonManager,
-    client: HttpClient,
-    items: list[dict],
+    items: list[AddonInfo],
     on_done: Callable[[list[DownloadLogEntry]], None],
     requested_by_user: bool = True,
 ) -> None:
-    update_info = mgr.extract_update_info(items)
-    mgr.update_supported_versions(update_info)
-
-    updated_addons = mgr.updates_required(update_info)
+    mgr.update_supported_versions(items)
+    updated_addons = mgr.get_updated_addons(items)
 
     if not updated_addons:
         on_done([])
         return
 
-    prompt_to_update(parent, mgr, client, updated_addons, on_done, requested_by_user)
+    prompt_to_update(parent, mgr, updated_addons, on_done, requested_by_user)
 
 
 def prompt_to_update(
     parent: QWidget,
     mgr: AddonManager,
-    client: HttpClient,
-    updated_addons: list[UpdateInfo],
+    updated_addons: list[AddonInfo],
     on_done: Callable[[list[DownloadLogEntry]], None],
     requested_by_user: bool = True,
 ) -> None:
+    client = HttpClient()
     if not requested_by_user:
         prompt_update = False
         for addon in updated_addons:
@@ -37,10 +37,10 @@ from anki.utils import (
     dev_mode,
     ids2str,
     int_time,
+    int_version,
     is_lin,
     is_mac,
     is_win,
-    point_version,
     split_fields,
 )
 from aqt import gui_hooks
@@ -954,7 +954,7 @@ title="{}" {}>{}</button>""".format(
         if on_done:
             on_done()
 
-        if elap > 86_400 or self.pm.last_run_version != point_version():
+        if elap > 86_400 or self.pm.last_run_version != int_version():
             check_and_prompt_for_updates(
                 self,
                 self.addonManager,
@@ -1401,29 +1401,9 @@ title="{}" {}>{}</button>""".format(
     ##########################################################################
 
     def setupAutoUpdate(self) -> None:
-        import aqt.update
+        from aqt.update import check_for_update
 
-        self.autoUpdate = aqt.update.LatestVersionFinder(self)
-        qconnect(self.autoUpdate.newVerAvail, self.newVerAvail)
-        qconnect(self.autoUpdate.newMsg, self.newMsg)
-        qconnect(self.autoUpdate.clockIsOff, self.clockIsOff)
-        self.autoUpdate.start()
-
-    def newVerAvail(self, ver: str) -> None:
-        if self.pm.meta.get("suppressUpdate", None) != ver:
-            aqt.update.askAndUpdate(self, ver)
-
-    def newMsg(self, data: dict) -> None:
-        aqt.update.showMessages(self, data)
-
-    def clockIsOff(self, diff: int) -> None:
-        if dev_mode:
-            print("clock is off; ignoring")
-            return
-        diffText = tr.qt_misc_second(count=diff)
-        warn = tr.qt_misc_in_order_to_ensure_your_collection(val="%s") % diffText
-        showWarning(warn)
-        self.app.closeAllWindows()
+        check_for_update()
 
     # Timers
     ##########################################################################
@@ -20,7 +20,7 @@ from anki.collection import Collection
 from anki.db import DB
 from anki.lang import without_unicode_isolation
 from anki.sync import SyncAuth
-from anki.utils import int_time, is_mac, is_win, point_version
+from anki.utils import int_time, int_version, is_mac, is_win
 from aqt import appHelpSite, gui_hooks
 from aqt.qt import *
 from aqt.theme import Theme, WidgetStyle, theme_manager
@@ -81,7 +81,7 @@ metaConf = dict(
     updates=True,
     created=int_time(),
     id=random.randrange(0, 2**63),
-    lastMsg=-1,
+    lastMsg=0,
     suppressUpdate=False,
     firstRun=True,
     defaultLang=None,
@@ -134,7 +134,7 @@ class ProfileManager:
         res = self._loadMeta()
         self.firstRun = res.firstTime
         self.last_run_version = self.meta.get("last_run_version", self.last_run_version)
-        self.meta["last_run_version"] = point_version()
+        self.meta["last_run_version"] = int_version()
         return res
 
     # -p profile provided on command line.
qt/aqt/update.py (116 changes)
@@ -1,77 +1,75 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-import time
-from typing import Any
-
-import requests
-
 import aqt
-from anki.utils import plat_desc, version_with_build
-from aqt.main import AnkiQt
+from anki.buildinfo import buildhash
+from anki.collection import CheckForUpdateResponse, Collection
+from anki.utils import dev_mode, int_time, int_version, plat_desc
+from aqt.operations import QueryOp
 from aqt.qt import *
-from aqt.utils import openLink, showText, tr
+from aqt.utils import openLink, show_warning, showText, tr
 
 
-class LatestVersionFinder(QThread):
-    newVerAvail = pyqtSignal(str)
-    newMsg = pyqtSignal(dict)
-    clockIsOff = pyqtSignal(float)
-
-    def __init__(self, main: AnkiQt) -> None:
-        QThread.__init__(self)
-        self.main = main
-        self.config = main.pm.meta
-
-    def _data(self) -> dict[str, Any]:
-        return {
-            "ver": version_with_build(),
-            "os": plat_desc(),
-            "id": self.config["id"],
-            "lm": self.config["lastMsg"],
-            "crt": self.config["created"],
-        }
-
-    def run(self) -> None:
-        if not self.config["updates"]:
-            return
-        d = self._data()
-        d["proto"] = 1
-
-        try:
-            r = requests.post(aqt.appUpdate, data=d, timeout=60)
-            r.raise_for_status()
-            resp = r.json()
-        except:
-            # behind proxy, corrupt message, etc
-            print("update check failed")
-            return
-        if resp["msg"]:
-            self.newMsg.emit(resp)  # type: ignore
-        if resp["ver"]:
-            self.newVerAvail.emit(resp["ver"])  # type: ignore
-        diff = resp["time"] - time.time()
-        if abs(diff) > 300:
-            self.clockIsOff.emit(diff)  # type: ignore
+def check_for_update() -> None:
+    from aqt import mw
+
+    def do_check(_col: Collection) -> CheckForUpdateResponse:
+        return mw.backend.check_for_update(
+            version=int_version(),
+            buildhash=buildhash,
+            os=plat_desc(),
+            install_id=mw.pm.meta["id"],
+            last_message_id=max(0, mw.pm.meta["lastMsg"]),
+        )
+
+    def on_done(resp: CheckForUpdateResponse) -> None:
+        # is clock off?
+        if not dev_mode:
+            diff = abs(resp.current_time - int_time())
+            if diff > 300:
+                diff_text = tr.qt_misc_second(count=diff)
+                warn = (
+                    tr.qt_misc_in_order_to_ensure_your_collection(val="%s") % diff_text
+                )
+                show_warning(warn)
+                mw.app.closeAllWindows()
+                return
+        # should we show a message?
+        if msg := resp.message:
+            showText(msg, parent=mw, type="html")
+            mw.pm.meta["lastMsg"] = resp.last_message_id
+        # has Anki been updated?
+        if ver := resp.new_version:
+            prompt_to_update(mw, ver)
+
+    def on_fail(exc: Exception) -> None:
+        print(f"update check failed: {exc}")
+
+    QueryOp(parent=mw, op=do_check, success=on_done).failure(
+        on_fail
+    ).run_in_background()
 
 
-def askAndUpdate(mw: aqt.AnkiQt, ver: str) -> None:
-    baseStr = tr.qt_misc_anki_updatedanki_has_been_released(val=ver)
-    msg = QMessageBox(mw)
-    msg.setStandardButtons(QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No)  # type: ignore
-    msg.setIcon(QMessageBox.Icon.Information)
-    msg.setText(baseStr + tr.qt_misc_would_you_like_to_download_it())
+def prompt_to_update(mw: aqt.AnkiQt, ver: str) -> None:
+    msg = (
+        tr.qt_misc_anki_updatedanki_has_been_released(val=ver)
+        + tr.qt_misc_would_you_like_to_download_it()
+    )
+
+    msgbox = QMessageBox(mw)
+    msgbox.setStandardButtons(
+        QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No
+    )
+    msgbox.setIcon(QMessageBox.Icon.Information)
+    msgbox.setText(msg)
+
     button = QPushButton(tr.qt_misc_ignore_this_update())
-    msg.addButton(button, QMessageBox.ButtonRole.RejectRole)
-    msg.setDefaultButton(QMessageBox.StandardButton.Yes)
-    ret = msg.exec()
-    if msg.clickedButton() == button:
+    msgbox.addButton(button, QMessageBox.ButtonRole.RejectRole)
+    msgbox.setDefaultButton(QMessageBox.StandardButton.Yes)
+    ret = msgbox.exec()
+
+    if msgbox.clickedButton() == button:
         # ignore this update
         mw.pm.meta["suppressUpdate"] = ver
     elif ret == QMessageBox.StandardButton.Yes:
         openLink(aqt.appWebsiteDownloadSection)
-
-
-def showMessages(mw: aqt.AnkiQt, data: dict) -> None:
-    showText(data["msg"], parent=mw, type="html")
-    mw.pm.meta["lastMsg"] = data["msgId"]
@@ -7,7 +7,7 @@ from zipfile import ZipFile
 
 from mock import MagicMock
 
-from aqt.addons import AddonManager, extract_update_info, package_name_valid
+from aqt.addons import AddonManager, package_name_valid
 
 
 def test_readMinimalManifest():
@@ -69,33 +69,6 @@ def assertReadManifest(contents, expectedManifest, nameInZip="manifest.json"):
     assert adm.readManifestFile(zfile) == expectedManifest
 
 
-def test_update_info():
-    json_info = dict(
-        id=999,
-        branches=[
-            {"minpt": 0, "maxpt": -15, "fmod": 222},
-            {"minpt": 20, "maxpt": -25, "fmod": 333},
-            {"minpt": 30, "maxpt": 35, "fmod": 444},
-        ],
-    )
-
-    r = extract_update_info(5, 0, json_info)
-    assert r.current_branch_max_point_ver == -15
-    assert r.suitable_branch_last_modified == 222
-
-    r = extract_update_info(5, 1, json_info)
-    assert r.current_branch_max_point_ver == -25
-    assert r.suitable_branch_last_modified == 222
-
-    r = extract_update_info(19, 1, json_info)
-    assert r.current_branch_max_point_ver == -25
-    assert r.suitable_branch_last_modified == 0
-
-    r = extract_update_info(20, 1, json_info)
-    assert r.current_branch_max_point_ver == -25
-    assert r.suitable_branch_last_modified == 333
-
-
 def test_package_name_validation():
     assert not package_name_valid("")
     assert not package_name_valid("/")
@@ -33,6 +33,7 @@ syn.workspace = true
 
 [dev-dependencies]
 async-stream.workspace = true
+reqwest = { workspace = true, features = ["native-tls"] }
 wiremock.workspace = true
 
 [dependencies]
@@ -124,7 +124,7 @@ fn build_method_arguments(input: &MessageDescriptor) -> String {
         args.push("*".to_string());
     }
     for field in fields {
-        let arg = format!("{}: {}", field.name(), python_type(&field));
+        let arg = format!("{}: {}", field.name(), python_type(&field, false));
         args.push(arg);
     }
     args.join(", ")
@@ -150,14 +150,17 @@ fn maybe_destructured_output(output: &MessageDescriptor) -> (String, String) {
     if output.fields().len() == 1 && !matches!(first_field.as_ref().unwrap().kind(), Kind::Enum(_))
     {
         let field = first_field.unwrap();
-        (format!("output.{}", field.name()), python_type(&field))
+        (
+            format!("output.{}", field.name()),
+            python_type(&field, true),
+        )
     } else {
         ("output".into(), full_name_to_python(output.full_name()))
     }
 }
 
 /// e.g. uint32 -> int; repeated bool -> Sequence[bool]
-fn python_type(field: &FieldDescriptor) -> String {
+fn python_type(field: &FieldDescriptor, output: bool) -> String {
     let kind = match field.kind() {
         Kind::Int32
         | Kind::Int64
@@ -177,11 +180,15 @@ fn python_type(field: &FieldDescriptor) -> String {
         Kind::Enum(en) => format!("{}.V", full_name_to_python(en.full_name())),
     };
     if field.is_list() {
-        format!("Sequence[{}]", kind)
+        if output {
+            format!("Sequence[{}]", kind)
+        } else {
+            format!("Iterable[{}]", kind)
+        }
     } else if field.is_map() {
         let map_kind = field.kind();
        let map_kind = map_kind.as_message().unwrap();
-        let map_kv: Vec<_> = map_kind.fields().map(|f| python_type(&f)).collect();
+        let map_kv: Vec<_> = map_kind.fields().map(|f| python_type(&f, output)).collect();
         format!("Mapping[{}, {}]", map_kv[0], map_kv[1])
     } else {
         kind
@@ -220,6 +227,7 @@ col.decks.all_config()
 from typing import *
 
 import anki
+import anki.ankiweb_pb2
 import anki.backend_pb2
 import anki.card_rendering_pb2
 import anki.cards_pb2
@@ -15,6 +15,7 @@ macro_rules! protobuf {
 }
 
 protobuf!(ankidroid, "ankidroid");
+protobuf!(ankiweb, "ankiweb");
 protobuf!(backend, "backend");
 protobuf!(card_rendering, "card_rendering");
 protobuf!(cards, "cards");
rslib/src/backend/ankiweb.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::time::Duration;
+
+use anki_proto::ankiweb::CheckForUpdateRequest;
+use anki_proto::ankiweb::CheckForUpdateResponse;
+use anki_proto::ankiweb::GetAddonInfoRequest;
+use anki_proto::ankiweb::GetAddonInfoResponse;
+use prost::Message;
+
+use super::Backend;
+use crate::prelude::*;
+use crate::services::BackendAnkiwebService;
+
+fn service_url(service: &str) -> String {
+    format!("https://ankiweb.net/svc/{service}")
+}
+
+impl Backend {
+    fn post<I, O>(&self, service: &str, input: I) -> Result<O>
+    where
+        I: Message,
+        O: Message + Default,
+    {
+        self.runtime_handle().block_on(async move {
+            let out = self
+                .web_client()
+                .post(service_url(service))
+                .body(input.encode_to_vec())
+                .timeout(Duration::from_secs(60))
+                .send()
+                .await?
+                .error_for_status()?
+                .bytes()
+                .await?;
+            let out: O = O::decode(&out[..])?;
+            Ok(out)
+        })
+    }
+}
+
+impl BackendAnkiwebService for Backend {
+    fn get_addon_info(&self, input: GetAddonInfoRequest) -> Result<GetAddonInfoResponse> {
+        self.post("desktop/addon-info", input)
+    }
+
+    fn check_for_update(&self, input: CheckForUpdateRequest) -> Result<CheckForUpdateResponse> {
+        self.post("desktop/check-for-update", input)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn addon_info() -> Result<()> {
+        if std::env::var("ONLINE_TESTS").is_err() {
+            println!("test disabled; ONLINE_TESTS not set");
+            return Ok(());
+        }
+        let backend = Backend::new(I18n::template_only(), false);
+        let info = backend.get_addon_info(GetAddonInfoRequest {
+            client_version: 30,
+            addon_ids: vec![3918629684],
+        })?;
+        assert_eq!(info.info[0].min_version, 0);
+        assert_eq!(info.info[0].max_version, 49);
+        Ok(())
+    }
+}
@@ -3,6 +3,7 @@
 
 mod adding;
 mod ankidroid;
+mod ankiweb;
 mod card_rendering;
 mod collection;
 mod config;
@@ -20,6 +21,7 @@ use std::thread::JoinHandle;
 
 use once_cell::sync::OnceCell;
 use prost::Message;
+use reqwest::Client;
 use tokio::runtime;
 use tokio::runtime::Runtime;
 
@@ -40,6 +42,7 @@ pub struct Backend {
     runtime: OnceCell<Runtime>,
     state: Arc<Mutex<BackendState>>,
     backup_task: Arc<Mutex<Option<JoinHandle<Result<()>>>>>,
+    web_client: OnceCell<Client>,
 }
 
 #[derive(Default)]
@@ -73,6 +76,7 @@ impl Backend {
             runtime: OnceCell::new(),
             state: Arc::new(Mutex::new(BackendState::default())),
             backup_task: Arc::new(Mutex::new(None)),
+            web_client: OnceCell::new(),
         }
     }
 
@@ -118,6 +122,12 @@ impl Backend {
             .clone()
     }
 
+    fn web_client(&self) -> &Client {
+        // currently limited to http1, as nginx doesn't support http2 proxies
+        self.web_client
+            .get_or_init(|| Client::builder().http1_only().build().unwrap())
+    }
+
     fn db_command(&self, input: &[u8]) -> Result<Vec<u8>> {
         self.with_col(|col| db_command_bytes(col, input))
     }
@@ -198,7 +198,7 @@ impl Backend {
             (col.media()?, col.new_progress_handler())
         };
         let rt = self.runtime_handle();
-        let sync_fut = mgr.sync_media(progress, auth);
+        let sync_fut = mgr.sync_media(progress, auth, self.web_client().clone());
         let abortable_sync = Abortable::new(sync_fut, abort_reg);
         let result = rt.block_on(abortable_sync);
 
@@ -238,7 +238,12 @@ impl Backend {
         let (_guard, abort_reg) = self.sync_abort_handle()?;
 
         let rt = self.runtime_handle();
-        let sync_fut = sync_login(input.username, input.password, input.endpoint);
+        let sync_fut = sync_login(
+            input.username,
+            input.password,
+            input.endpoint,
+            self.web_client().clone(),
+        );
         let abortable_sync = Abortable::new(sync_fut, abort_reg);
         let ret = match rt.block_on(abortable_sync) {
             Ok(sync_result) => sync_result,
@@ -276,7 +281,7 @@ impl Backend {
         let rt = self.runtime_handle();
         let time_at_check_begin = TimestampSecs::now();
         let local = self.with_col(|col| col.sync_meta())?;
-        let mut client = HttpSyncClient::new(auth);
+        let mut client = HttpSyncClient::new(auth, self.web_client().clone());
        let state = rt.block_on(online_sync_status_check(local, &mut client))?;
         {
             let mut guard = self.state.lock().unwrap();
@@ -301,9 +306,10 @@ impl Backend {
         let (_guard, abort_reg) = self.sync_abort_handle()?;
 
         let rt = self.runtime_handle();
+        let client = self.web_client().clone();
 
         let ret = self.with_col(|col| {
-            let sync_fut = col.normal_sync(auth.clone());
+            let sync_fut = col.normal_sync(auth.clone(), client.clone());
             let abortable_sync = Abortable::new(sync_fut, abort_reg);
 
             match rt.block_on(abortable_sync) {
@@ -313,7 +319,7 @@ impl Backend {
                     col.storage.rollback_trx()?;
                     // and tell AnkiWeb to clean up
                     let _handle = std::thread::spawn(move || {
-                        let _ = rt.block_on(sync_abort(auth));
+                        let _ = rt.block_on(sync_abort(auth, client));
                     });
 
                     Err(AnkiError::Interrupted)
@@ -353,11 +359,11 @@ impl Backend {
         let mut builder = col_inner.as_builder();
 
         let result = if upload {
-            let sync_fut = col_inner.full_upload(auth);
+            let sync_fut = col_inner.full_upload(auth, self.web_client().clone());
             let abortable_sync = Abortable::new(sync_fut, abort_reg);
             rt.block_on(abortable_sync)
         } else {
-            let sync_fut = col_inner.full_download(auth);
+            let sync_fut = col_inner.full_download(auth, self.web_client().clone());
             let abortable_sync = Abortable::new(sync_fut, abort_reg);
             rt.block_on(abortable_sync)
         };
@@ -11,6 +11,7 @@ use std::path::Path;
 use std::path::PathBuf;
 
 use anki_io::create_dir_all;
+use reqwest::Client;
 
 use crate::media::files::add_data_to_folder_uniquely;
 use crate::media::files::mtime_as_i64;
@@ -145,8 +146,9 @@ impl MediaManager {
         self,
         progress: ThrottlingProgressHandler<MediaSyncProgress>,
         auth: SyncAuth,
+        client: Client,
     ) -> Result<()> {
-        let client = HttpSyncClient::new(auth);
+        let client = HttpSyncClient::new(auth, client);
         let mut syncer = MediaSyncer::new(self, progress, client)?;
         syncer.sync().await
     }
@@ -5,6 +5,7 @@ use anki_io::atomic_rename;
 use anki_io::new_tempfile_in_parent_of;
 use anki_io::read_file;
 use anki_io::write_file;
+use reqwest::Client;
 
 use crate::collection::CollectionBuilder;
 use crate::prelude::*;
@@ -17,8 +18,8 @@ use crate::sync::login::SyncAuth;
 
 impl Collection {
     /// Download collection from AnkiWeb. Caller must re-open afterwards.
-    pub async fn full_download(self, auth: SyncAuth) -> Result<()> {
-        self.full_download_with_server(HttpSyncClient::new(auth))
+    pub async fn full_download(self, auth: SyncAuth, client: Client) -> Result<()> {
+        self.full_download_with_server(HttpSyncClient::new(auth, client))
             .await
     }
 
@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use reqwest::Client;
 use tracing::debug;
 
 use crate::collection::Collection;
@@ -152,8 +153,12 @@ impl From<ClientSyncState> for SyncOutput {
 }
 
 impl Collection {
-    pub async fn normal_sync(&mut self, auth: SyncAuth) -> error::Result<SyncOutput> {
-        NormalSyncer::new(self, HttpSyncClient::new(auth))
+    pub async fn normal_sync(
+        &mut self,
+        auth: SyncAuth,
+        client: Client,
+    ) -> error::Result<SyncOutput> {
+        NormalSyncer::new(self, HttpSyncClient::new(auth, client))
             .sync()
             .await
     }
@@ -1,6 +1,8 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use reqwest::Client;
+
 use crate::error;
 use crate::sync::collection::protocol::EmptyInput;
 use crate::sync::collection::protocol::SyncProtocol;
@@ -21,8 +23,8 @@ pub struct FullSyncProgress {
     pub total_bytes: usize,
 }
 
-pub async fn sync_abort(auth: SyncAuth) -> error::Result<()> {
-    HttpSyncClient::new(auth)
+pub async fn sync_abort(auth: SyncAuth, client: Client) -> error::Result<()> {
+    HttpSyncClient::new(auth, client)
         .abort(EmptyInput::request())
         .await?
         .json()
@ -7,6 +7,7 @@ use std::future::Future;
|
||||||
|
|
||||||
use axum::http::StatusCode;
|
use axum::http::StatusCode;
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
|
use reqwest::Client;
|
||||||
use reqwest::Url;
|
use reqwest::Url;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use tempfile::tempdir;
|
use tempfile::tempdir;
|
||||||
|
@ -106,7 +107,7 @@ where
|
||||||
endpoint: Some(endpoint),
|
endpoint: Some(endpoint),
|
||||||
io_timeout_secs: None,
|
io_timeout_secs: None,
|
||||||
};
|
};
|
||||||
let client = HttpSyncClient::new(auth);
|
let client = HttpSyncClient::new(auth, Client::new());
|
||||||
op(client).await
|
op(client).await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -12,6 +12,7 @@ use axum::response::Response;
 use flate2::write::GzEncoder;
 use flate2::Compression;
 use futures::StreamExt;
+use reqwest::Client;
 use tokio_util::io::ReaderStream;
 
 use crate::collection::CollectionBuilder;
@@ -32,8 +33,8 @@ pub const CORRUPT_MESSAGE: &str =
 
 impl Collection {
     /// Upload collection to AnkiWeb. Caller must re-open afterwards.
-    pub async fn full_upload(self, auth: SyncAuth) -> Result<()> {
-        self.full_upload_with_server(HttpSyncClient::new(auth))
+    pub async fn full_upload(self, auth: SyncAuth, client: Client) -> Result<()> {
+        self.full_upload_with_server(HttpSyncClient::new(auth, client))
             .await
     }
 
@@ -34,12 +34,12 @@ pub struct HttpSyncClient {
 }
 
 impl HttpSyncClient {
-    pub fn new(auth: SyncAuth) -> HttpSyncClient {
+    pub fn new(auth: SyncAuth, client: Client) -> HttpSyncClient {
         let io_timeout = Duration::from_secs(auth.io_timeout_secs.unwrap_or(30) as u64);
         HttpSyncClient {
             sync_key: auth.hkey,
             session_key: simple_session_id(),
-            client: Client::builder().http1_only().build().unwrap(),
+            client,
             endpoint: auth
                 .endpoint
                 .unwrap_or_else(|| Url::try_from("https://sync.ankiweb.net/").unwrap()),
@@ -1,6 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
+use reqwest::Client;
 use reqwest::Url;
 use serde::Deserialize;
 use serde::Serialize;
@@ -34,13 +35,14 @@ pub async fn sync_login<S: Into<String>>(
     username: S,
     password: S,
     endpoint: Option<String>,
+    client: Client,
 ) -> Result<SyncAuth> {
     let auth = anki_proto::sync::SyncAuth {
         endpoint,
         ..Default::default()
     }
     .try_into()?;
-    let client = HttpSyncClient::new(auth);
+    let client = HttpSyncClient::new(auth, client);
     let resp = client
         .host_key(
             HostKeyRequest {