mirror of
https://github.com/ankitects/anki.git
synced 2025-09-20 15:02:21 -04:00
Remove legacy editor code
We can worry about add-on compatibility later
This commit is contained in:
parent
f4293c1f9e
commit
4dd402334f
1 changed files with 3 additions and 458 deletions
461
qt/aqt/editor.py
461
qt/aqt/editor.py
|
@ -5,37 +5,25 @@ from __future__ import annotations
|
|||
|
||||
import base64
|
||||
import functools
|
||||
import html
|
||||
import json
|
||||
import mimetypes
|
||||
import os
|
||||
import re
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import warnings
|
||||
from collections.abc import Callable
|
||||
from enum import Enum
|
||||
from random import randrange
|
||||
from typing import Any, Iterable, Match
|
||||
|
||||
import bs4
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from typing import Any
|
||||
|
||||
from anki._legacy import deprecated
|
||||
from anki.cards import Card
|
||||
from anki.collection import Config
|
||||
from anki.hooks import runFilter
|
||||
from anki.httpclient import HttpClient
|
||||
from anki.models import NotetypeDict, StockNotetype
|
||||
from anki.notes import Note, NoteId
|
||||
from anki.utils import checksum, is_win, namedtmp
|
||||
from anki.utils import is_win
|
||||
from aqt import AnkiQt, gui_hooks
|
||||
from aqt.operations.notetype import update_notetype_legacy
|
||||
from aqt.qt import *
|
||||
from aqt.sound import av_player
|
||||
from aqt.utils import KeyboardModifiersPressed, shortcut, showWarning, tr
|
||||
from aqt.utils import shortcut, showWarning
|
||||
from aqt.webview import AnkiWebView, AnkiWebViewKind
|
||||
|
||||
pics = ("jpg", "jpeg", "png", "gif", "svg", "webp", "ico", "avif")
|
||||
|
@ -552,251 +540,11 @@ require("anki/ui").loaded.then(() => require("anki/NoteEditor").instances[0].too
|
|||
self.web.cleanup()
|
||||
self.web = None # type: ignore
|
||||
|
||||
# legacy
|
||||
|
||||
# legacy add-on-facing alias for set_note
setNote = set_note
|
||||
|
||||
# legacy
|
||||
|
||||
def saveAddModeVars(self) -> None:
    """Legacy no-op retained so old add-ons can still call it."""
    pass
|
||||
|
||||
# Audio/video/images
|
||||
######################################################################
|
||||
|
||||
def addMedia(self, path: str, canDelete: bool = False) -> None:
    """Legacy routine used by add-ons to add a media file and update the current field.
    canDelete is ignored."""
    try:
        tag = self._addMedia(path)
    except Exception as exc:
        # surface media-folder errors to the user instead of crashing
        showWarning(str(exc))
        return
    self.web.eval(f"setFormat('inserthtml', {json.dumps(tag)});")
|
||||
|
||||
def _addMedia(self, path: str, canDelete: bool = False) -> str:
    """Copy *path* into the media folder and return a local img/sound tag for it."""
    stored_name = self.mw.col.media.add_file(path)
    # return a local html link referencing the stored file
    return self.fnameToLink(stored_name)
|
||||
|
||||
def _addMediaFromData(self, fname: str, data: bytes) -> str:
    """Write raw *data* into the media folder as *fname*; returns the stored name."""
    return self.mw.col.media._legacy_write_data(fname, data)
|
||||
|
||||
# Media downloads
|
||||
######################################################################
|
||||
|
||||
def urlToLink(self, url: str, allowed_suffixes: Iterable[str] = ()) -> str:
    """Fetch *url* into the media folder and return an html tag for it.

    Falls back to a plain hyperlink when the url has no supported suffix.
    """
    if allowed_suffixes:
        fname = self.urlToFile(url, allowed_suffixes)
    else:
        fname = self.urlToFile(url)
    if fname:
        return self.fnameToLink(fname)
    return '<a href="{}">{}</a>'.format(
        url, html.escape(urllib.parse.unquote(url))
    )
|
||||
|
||||
def fnameToLink(self, fname: str) -> str:
    """Return an <img> tag for pictures; otherwise play the file and return a [sound:] tag."""
    extension = fname.split(".")[-1].lower()
    if extension in pics:
        quoted = urllib.parse.quote(fname.encode("utf8"))
        return f'<img src="{quoted}">'
    # non-picture media: preview the sound/video, then embed a sound tag
    av_player.play_file_with_caller(fname, self.editorMode)
    return f"[sound:{html.escape(fname, quote=False)}]"
|
||||
|
||||
def urlToFile(
    self, url: str, allowed_suffixes: Iterable[str] = pics + audio
) -> str | None:
    """Download *url* if it ends in one of *allowed_suffixes*; return the local name or None."""
    lowered = url.lower()
    if any(lowered.endswith(f".{suffix}") for suffix in allowed_suffixes):
        return self._retrieveURL(url)
    # not a supported type
    return None
|
||||
|
||||
def isURL(self, s: str) -> bool:
    """True if *s* starts with an http/https/ftp/file scheme (case-insensitive)."""
    return s.lower().startswith(("http://", "https://", "ftp://", "file://"))
|
||||
|
||||
def inlinedImageToFilename(self, txt: str) -> str:
    """If *txt* is a base64 data-URI for a supported image, store it and return the
    media filename; otherwise return ""."""
    for image_ext in ("jpg", "jpeg", "png", "gif"):
        head = "data:image/" + image_ext + ";base64,"
        if not txt.startswith(head):
            continue
        payload = base64.b64decode(txt[len(head):].strip(), validate=True)
        # normalise jpeg -> jpg for the stored filename
        stored_ext = "jpg" if image_ext == "jpeg" else image_ext
        return self._addPastedImage(payload, stored_ext)
    return ""
|
||||
|
||||
def inlinedImageToLink(self, src: str) -> str:
    """Convert an inlined data-URI image to a stored-media <img> tag, or ""."""
    fname = self.inlinedImageToFilename(src)
    return self.fnameToLink(fname) if fname else ""
|
||||
|
||||
def _pasted_image_filename(self, data: bytes, ext: str) -> str:
    """Deterministic media filename for pasted image bytes, keyed by content checksum."""
    return f"paste-{checksum(data)}.{ext}"
|
||||
|
||||
def _read_pasted_image(self, mime: QMimeData) -> str:
    """Re-encode the clipboard image (png or jpg per config) and write it to a
    temp file; returns the temp path."""
    image = QImage(mime.imageData())
    buffer = QBuffer()
    buffer.open(QBuffer.OpenModeFlag.ReadWrite)
    if self.mw.col.get_config_bool(Config.Bool.PASTE_IMAGES_AS_PNG):
        ext, quality = "png", 50
    else:
        ext, quality = "jpg", 80
    image.save(buffer, ext, quality)
    buffer.reset()
    data = bytes(buffer.readAll())  # type: ignore
    path = namedtmp(self._pasted_image_filename(data, ext))
    with open(path, "wb") as file:
        file.write(data)
    return path
|
||||
|
||||
def _addPastedImage(self, data: bytes, ext: str) -> str:
    """Store pasted image bytes in the media folder; returns the media filename.

    The name derives from a content hash, so identical pastes deduplicate.
    """
    fname = self._pasted_image_filename(data, ext)
    return self._addMediaFromData(fname, data)
|
||||
|
||||
def _retrieveURL(self, url: str) -> str | None:
    "Download file into media folder and return local filename or None."
    local = url.lower().startswith("file://")
    # fetch it into a temporary folder; only show progress for remote fetches
    self.mw.progress.start(immediate=not local, parent=self.parentWindow)
    content_type = None
    error_msg: str | None = None
    try:
        if local:
            # urllib doesn't understand percent-escaped utf8, but requires things like
            # '#' to be escaped.
            url = urllib.parse.unquote(url)
            url = url.replace("%", "%25")
            url = url.replace("#", "%23")
            req = urllib.request.Request(
                url, None, {"User-Agent": "Mozilla/5.0 (compatible; Anki)"}
            )
            with urllib.request.urlopen(req) as response:
                filecontents = response.read()
        else:
            with HttpClient() as client:
                client.timeout = 30
                with client.get(url) as response:
                    if response.status_code != 200:
                        error_msg = tr.qt_misc_unexpected_response_code(
                            val=response.status_code,
                        )
                        return None
                    filecontents = response.content
                    content_type = response.headers.get("content-type")
    except (urllib.error.URLError, requests.exceptions.RequestException) as e:
        error_msg = tr.editing_an_error_occurred_while_opening(val=str(e))
        return None
    finally:
        # always close the progress window; warn after it's gone
        self.mw.progress.finish()
        if error_msg:
            showWarning(error_msg)
    # strip off any query string
    url = re.sub(r"\?.*?$", "", url)
    fname = os.path.basename(urllib.parse.unquote(url))
    if not fname.strip():
        fname = "paste"
    if content_type:
        fname = self.mw.col.media.add_extension_based_on_mime(fname, content_type)

    return self.mw.col.media.write_data(fname, filecontents)
|
||||
|
||||
# Paste/drag&drop
|
||||
######################################################################
|
||||
|
||||
# tag names stripped from external pastes (see _pastePreFilter)
removeTags = ["script", "iframe", "object", "style"]
|
||||
|
||||
def _pastePreFilter(self, html: str, internal: bool) -> str:
    """Sanitise pasted html: strip dangerous tags, normalise <p> to <div>,
    and localise image references."""
    # https://anki.tenderapp.com/discussions/ankidesktop/39543-anki-is-replacing-the-character-by-when-i-exit-the-html-edit-mode-ctrlshiftx
    if html.find(">") < 0:
        return html

    with warnings.catch_warnings() as w:
        warnings.simplefilter("ignore", UserWarning)
        doc = BeautifulSoup(html, "html.parser")

    tag: bs4.element.Tag
    if not internal:
        for tag in self.removeTags:
            for node in doc(tag):
                node.decompose()

        # convert p tags to divs
        for node in doc("p"):
            node.name = "div"

    for tag in doc("img"):
        try:
            src = tag["src"]
        except KeyError:
            # for some bizarre reason, mnemosyne removes src elements
            # from missing media
            continue

        if internal:
            # in internal pastes, rewrite mediasrv references to relative
            m = re.match(r"http://127.0.0.1:\d+/(.*)$", src)
            if m:
                tag["src"] = m.group(1)
        else:
            # in external pastes, download remote media
            if self.isURL(src):
                fname = self._retrieveURL(src)
                if fname:
                    tag["src"] = fname
            elif src.startswith("data:image/"):
                # and convert inlined data
                tag["src"] = self.inlinedImageToFilename(src)

    return str(doc)
|
||||
|
||||
def doPaste(self, html: str, internal: bool, extended: bool = False) -> None:
    """Filter *html* and hand it to the web view's pasteHTML; fires the paste hook."""
    html = self._pastePreFilter(html, internal)
    ext = "true" if extended else "false"
    self.web.eval(f"pasteHTML({json.dumps(html)}, {json.dumps(internal)}, {ext});")
    gui_hooks.editor_did_paste(self, html, internal, extended)
|
||||
|
||||
def doDrop(
    self, html: str, internal: bool, extended: bool, cursor_pos: QPoint
) -> None:
    """Focus the field under *cursor_pos* and paste *html* into it if one exists."""

    def pasteIfField(ret: bool) -> None:
        if ret:
            self.doPaste(html, internal, extended)

    # translate device pixels back to page coordinates
    zoom = self.web.zoomFactor()
    x = int(cursor_pos.x() / zoom)
    y = int(cursor_pos.y() / zoom)

    self.web.evalWithCallback(f"focusIfField({x}, {y});", pasteIfField)
|
||||
|
||||
def onPaste(self) -> None:
    """Forward a paste request to the editor's web view."""
    self.web.onPaste()
|
||||
|
||||
|
@ -887,14 +635,6 @@ class EditorWebView(AnkiWebView):
|
|||
AnkiWebView.__init__(self, kind=AnkiWebViewKind.EDITOR)
|
||||
self.editor = editor
|
||||
self.setAcceptDrops(True)
|
||||
self._store_field_content_on_next_clipboard_change = False
|
||||
# when we detect the user copying from a field, we store the content
|
||||
# here, and use it when they paste, so we avoid filtering field content
|
||||
self._internal_field_text_for_paste: str | None = None
|
||||
self._last_known_clipboard_mime: QMimeData | None = None
|
||||
clip = self.editor.mw.app.clipboard()
|
||||
assert clip is not None
|
||||
clip.dataChanged.connect(self._on_clipboard_change)
|
||||
self.settings().setAttribute( # type: ignore
|
||||
QWebEngineSettings.WebAttribute.JavascriptCanPaste, True
|
||||
)
|
||||
|
@ -903,206 +643,11 @@ class EditorWebView(AnkiWebView):
|
|||
)
|
||||
gui_hooks.editor_web_view_did_init(self)
|
||||
|
||||
def _on_clipboard_change(
    self, mode: QClipboard.Mode = QClipboard.Mode.Clipboard
) -> None:
    """Track clipboard state so internal field copies can be pasted unfiltered."""
    self._last_known_clipboard_mime = self._clipboard().mimeData(mode)
    if self._store_field_content_on_next_clipboard_change:
        # a field copy just happened; remember its html for later paste
        self._internal_field_text_for_paste = self._get_clipboard_html_for_field(
            mode
        )
        self._store_field_content_on_next_clipboard_change = False
    elif self._internal_field_text_for_paste != self._get_clipboard_html_for_field(
        mode
    ):
        # clipboard changed since the field copy; saved content is stale
        self._internal_field_text_for_paste = None
|
||||
|
||||
def _get_clipboard_html_for_field(self, mode: QClipboard.Mode) -> str | None:
    """Return the clipboard's html payload, or None when absent."""
    mime = self._clipboard().mimeData(mode)
    if mime and mime.hasHtml():
        return mime.html()
    return None
|
||||
|
||||
def onCut(self) -> None:
    """Trigger the web engine's native Cut action."""
    self.triggerPageAction(QWebEnginePage.WebAction.Cut)
|
||||
|
||||
def onCopy(self) -> None:
    """Trigger the web engine's native Copy action."""
    self.triggerPageAction(QWebEnginePage.WebAction.Copy)
|
||||
|
||||
def _wantsExtendedPaste(self) -> bool:
    """True when formatting should be kept on paste; shift inverts the setting."""
    strip_html = self.editor.mw.col.get_config_bool(
        Config.Bool.PASTE_STRIPS_FORMATTING
    )
    if KeyboardModifiersPressed().shift:
        strip_html = not strip_html
    return not strip_html
|
||||
|
||||
def _onPaste(self, mode: QClipboard.Mode) -> None:
    """Paste clipboard contents into the current field.

    Prefers the saved internal field html (unfiltered) when the clipboard
    still matches a copy made from a field; otherwise processes the mime
    data through the normal filters.
    """
    # _on_clipboard_change doesn't always trigger properly on macOS, so
    # double-check whether the clipboard changed before pasting
    clipboard = self._clipboard()
    if self._last_known_clipboard_mime != clipboard.mimeData(mode):
        self._on_clipboard_change(mode)
    extended = self._wantsExtendedPaste()
    if html := self._internal_field_text_for_paste:
        # reuse the saved field content as an internal paste
        self.editor.doPaste(html, True, extended)
    else:
        if not (mime := clipboard.mimeData(mode=mode)):
            return
        html, internal = self._processMime(mime, extended)
        if html:
            self.editor.doPaste(html, internal, extended)
|
||||
|
||||
def onPaste(self) -> None:
    """Trigger the web engine's native Paste action."""
    self.triggerPageAction(QWebEnginePage.WebAction.Paste)
|
||||
|
||||
def onMiddleClickPaste(self) -> None:
    """Paste from the X11 primary selection (middle-click paste)."""
    self._onPaste(QClipboard.Mode.Selection)
|
||||
|
||||
# def dragEnterEvent(self, evt: QDragEnterEvent | None) -> None:
|
||||
# assert evt is not None
|
||||
# evt.accept()
|
||||
|
||||
# def dropEvent(self, evt: QDropEvent | None) -> None:
|
||||
# assert evt is not None
|
||||
# extended = self._wantsExtendedPaste()
|
||||
# mime = evt.mimeData()
|
||||
# assert mime is not None
|
||||
|
||||
# if (
|
||||
# self.editor.state is EditorState.IO_PICKER
|
||||
# and (html := self._processUrls(mime, allowed_suffixes=pics))
|
||||
# and (path := self.editor.extract_img_path_from_html(html))
|
||||
# ):
|
||||
# self.editor.setup_mask_editor(path)
|
||||
# return
|
||||
|
||||
# evt_pos = evt.position()
|
||||
# cursor_pos = QPoint(int(evt_pos.x()), int(evt_pos.y()))
|
||||
|
||||
# if evt.source() and mime.hasHtml():
|
||||
# # don't filter html from other fields
|
||||
# html, internal = mime.html(), True
|
||||
# else:
|
||||
# html, internal = self._processMime(mime, extended, drop_event=True)
|
||||
|
||||
# if not html:
|
||||
# return
|
||||
|
||||
# self.editor.doDrop(html, internal, extended, cursor_pos)
|
||||
|
||||
# returns (html, isInternal)
|
||||
# returns (html, isInternal)
def _processMime(
    self, mime: QMimeData, extended: bool = False, drop_event: bool = False
) -> tuple[str, bool]:
    """Extract pasteable html from *mime*, trying html, image, url and text
    handlers in turn; returns (html, isInternal)."""
    internal = False

    # add-ons may substitute their own mime data here
    mime = gui_hooks.editor_will_process_mime(
        mime, self, internal, extended, drop_event
    )

    if mime.hasHtml():
        html_content = mime.html()[11:] if internal else mime.html()
        return html_content, internal

    # given _processUrls' extra allowed_suffixes kwarg, placate the typechecker
    def process_url(mime: QMimeData, extended: bool = False) -> str | None:
        return self._processUrls(mime, extended)

    # favour url if it's a local link
    is_local_url = (
        mime.hasUrls()
        and (urls := mime.urls())
        and urls[0].toString().startswith("file://")
    )
    if is_local_url:
        handlers = (process_url, self._processImage, self._processText)
    else:
        handlers = (self._processImage, process_url, self._processText)

    for handler in handlers:
        html = handler(mime, extended)
        if html:
            return html, True
    return "", False
|
||||
|
||||
def _processUrls(
    self,
    mime: QMimeData,
    extended: bool = False,
    allowed_suffixes: Iterable[str] = (),
) -> str | None:
    """Convert each url in *mime* to a link/tag; None when no urls present."""
    if not mime.hasUrls():
        return None

    parts: list[str] = []
    for qurl in mime.urls():
        url = qurl.toString()
        # chrome likes to give us the URL twice with a \n
        if lines := url.splitlines():
            url = lines[0]
        parts.append(self.editor.urlToLink(url, allowed_suffixes=allowed_suffixes))

    return "".join(parts)
|
||||
|
||||
def _processText(self, mime: QMimeData, extended: bool = False) -> str | None:
    """Convert plain clipboard text to html, localising image/sound links when
    *extended*; None when no text present."""
    if not mime.hasText():
        return None

    out: list[str] = []
    for line in mime.text().split("\n"):
        for token in re.split(r"(\S+)", line):
            # inlined data in base64?
            if extended and token.startswith("data:image/"):
                out.append(self.editor.inlinedImageToLink(token))
            elif extended and self.editor.isURL(token):
                # if the user is pasting an image or sound link, convert it to local, otherwise paste as a hyperlink
                out.append(self.editor.urlToLink(token))
            else:
                token = html.escape(token).replace("\t", " " * 4)

                # if there's more than one consecutive space,
                # use non-breaking spaces for the second one on
                # NOTE(review): scrape mangled this literal; reconstructed as
                # the html nbsp entity per the comment above — confirm upstream
                def repl(match: Match) -> str:
                    return f"{match.group(1).replace(' ', '&nbsp;')} "

                token = re.sub(" ( +)", repl, token)
                out.append(token)
        out.append("<br>")
    # remove last <br>
    out.pop()
    return "".join(out)
|
||||
|
||||
def _processImage(self, mime: QMimeData, extended: bool = False) -> str | None:
    """Store a clipboard image in the media folder; returns its html tag, or
    None when no image present."""
    if not mime.hasImage():
        return None
    path = self.editor._read_pasted_image(mime)
    return self.editor._addMedia(path)
|
||||
|
||||
def contextMenuEvent(self, evt: QContextMenuEvent | None) -> None:
    """Show the editor context menu, letting add-ons populate it first."""
    menu = QMenu(self)
    gui_hooks.editor_will_show_context_menu(self, menu)
    menu.popup(QCursor.pos())
|
||||
|
||||
def _clipboard(self) -> QClipboard:
    """Return the application clipboard; always non-None once the app exists."""
    board = self.editor.mw.app.clipboard()
    assert board is not None
    return board
|
||||
|
|
Loading…
Reference in a new issue