Mirror of https://github.com/ankitects/anki.git
add add_file() and write_data()
parent 41266f46f1, commit 4096d21c07
9 changed files with 97 additions and 64 deletions
@@ -19,6 +19,7 @@ message BackendInput {
         string strip_av_tags = 23;
         ExtractAVTagsIn extract_av_tags = 24;
         string expand_clozes_to_reveal_latex = 25;
+        AddFileToMediaFolderIn add_file_to_media_folder = 26;
     }
 }

@@ -34,6 +35,7 @@ message BackendOutput {
         string strip_av_tags = 23;
         ExtractAVTagsOut extract_av_tags = 24;
         string expand_clozes_to_reveal_latex = 25;
+        string add_file_to_media_folder = 26;

         BackendError error = 2047;
     }
@@ -41,16 +43,13 @@ message BackendOutput {

 message BackendError {
     oneof value {
-        InvalidInputError invalid_input = 1;
-        TemplateParseError template_parse = 2;
+        StringError invalid_input = 1;
+        StringError template_parse = 2;
+        StringError io_error = 3;
     }
 }

-message InvalidInputError {
-    string info = 1;
-}
-
-message TemplateParseError {
+message StringError {
     string info = 1;
     bool q_side = 2;
 }
@@ -174,3 +173,8 @@ message TTSTag {
     float speed = 4;
     repeated string other_args = 5;
 }
+
+message AddFileToMediaFolderIn {
+    string desired_name = 1;
+    bytes data = 2;
+}
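Note: the new AddFileToMediaFolderIn input and the matching string output form the request/response pair for storing a media file. A minimal sketch of the round trip from Python, assuming the protoc-generated module is importable as pb (the same alias rsbackend.py uses below); the reply shown is simulated for illustration only:

    import backend_pb2 as pb  # assumption: the protoc-generated module for this .proto

    # Request: ask the backend to store these bytes under the desired name.
    request = pb.BackendInput(
        add_file_to_media_folder=pb.AddFileToMediaFolderIn(
            desired_name="hello.jpg",
            data=b"...file bytes...",
        )
    )
    wire = request.SerializeToString()  # bytes handed to the Rust backend

    # Response: the string field carries the name actually used, which may have
    # been changed if "hello.jpg" was already taken by different content.
    reply = pb.BackendOutput(add_file_to_media_folder="hello.jpg")  # simulated reply
    print(reply.add_file_to_media_folder)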
@@ -25,6 +25,10 @@ from anki.latex import render_latex
 from anki.utils import checksum, isMac, isWin


+def media_folder_from_col_path(col_path: str) -> str:
+    return re.sub(r"(?i)\.(anki2)$", ".media", col_path)
+
+
 class MediaManager:

     soundRegexps = [r"(?i)(\[sound:(?P<fname>[^]]+)\])"]
@@ -43,7 +47,7 @@ class MediaManager:
             self._dir = None
             return
         # media directory
-        self._dir = re.sub(r"(?i)\.(anki2)$", ".media", self.col.path)
+        self._dir = media_folder_from_col_path(self.col.path)
         if not os.path.exists(self._dir):
             os.makedirs(self._dir)
         try:
@@ -155,58 +159,42 @@ create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);

     # Adding media
     ##########################################################################
-    # opath must be in unicode

-    def addFile(self, opath: str) -> Any:
-        with open(opath, "rb") as f:
-            return self.writeData(opath, f.read())
+    def add_file(self, path: str) -> str:
+        """Add basename of path to the media folder, renaming if not unique.
+
+        Returns possibly-renamed filename."""
+        with open(path, "rb") as f:
+            return self.write_data(os.path.basename(path), f.read())

-    def writeData(self, opath: str, data: bytes, typeHint: Optional[str] = None) -> Any:
-        # if fname is a full path, use only the basename
-        fname = os.path.basename(opath)
+    def write_data(self, desired_fname: str, data: bytes) -> str:
+        """Write the file to the media folder, renaming if not unique.

-        # if it's missing an extension and a type hint was provided, use that
-        if not os.path.splitext(fname)[1] and typeHint:
+        Returns possibly-renamed filename."""
+        return self.col.backend.add_file_to_media_folder(desired_fname, data)
+
+    def add_extension_based_on_mime(self, fname: str, content_type: str) -> str:
+        "If jpg or png mime, add .png/.jpg if missing extension."
+        if not os.path.splitext(fname)[1]:
             # mimetypes is returning '.jpe' even after calling .init(), so we'll do
             # it manually instead
-            typeMap = {
+            type_map = {
                 "image/jpeg": ".jpg",
                 "image/png": ".png",
             }
-            if typeHint in typeMap:
-                fname += typeMap[typeHint]
+            if content_type in type_map:
+                fname += type_map[content_type]
+        return fname

-        # make sure we write it in NFC form (pre-APFS Macs will autoconvert to NFD),
-        # and return an NFC-encoded reference
-        fname = unicodedata.normalize("NFC", fname)
-        # ensure it's a valid filename
-        base = self.cleanFilename(fname)
-        (root, ext) = os.path.splitext(base)
+    # legacy
+    addFile = add_file

-        def repl(match):
-            n = int(match.group(1))
-            return " (%d)" % (n + 1)
-
-        # find the first available name
-        csum = checksum(data)
-        while True:
-            fname = root + ext
-            path = os.path.join(self.dir(), fname)
-            # if it doesn't exist, copy it directly
-            if not os.path.exists(path):
-                with open(path, "wb") as f:
-                    f.write(data)
-                return fname
-            # if it's identical, reuse
-            with open(path, "rb") as f:
-                if checksum(f.read()) == csum:
-                    return fname
-            # otherwise, increment the index in the filename
-            reg = r" \((\d+)\)$"
-            if not re.search(reg, root):
-                root = root + " (1)"
-            else:
-                root = re.sub(reg, repl, root)
+    # legacy
+    def writeData(self, opath: str, data: bytes, typeHint: Optional[str] = None) -> str:
+        fname = os.path.basename(opath)
+        if typeHint:
+            fname = self.add_extension_based_on_mime(fname, typeHint)
+        return self.write_data(fname, data)

     # String manipulation
     ##########################################################################
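The reworked MediaManager surface in use; a small sketch assuming an open collection col, with illustrative paths and filenames that are not part of this commit:

    # add_file() stores the file under its basename and returns the name actually
    # used, which may differ if that name was already taken by different content.
    name = col.media.add_file("/home/user/Pictures/cat.jpg")

    # write_data() skips the filesystem read and hands raw bytes to the backend.
    name2 = col.media.write_data("notes.txt", b"some bytes")

    # add_extension_based_on_mime() only fills in a missing .jpg/.png extension.
    assert col.media.add_extension_based_on_mime("photo", "image/png") == "photo.png"
    assert col.media.add_extension_based_on_mime("photo.jpg", "image/png") == "photo.jpg"

    # The camelCase entry points survive as legacy wrappers, so existing callers
    # such as aqt keep working.
    col.media.addFile("/home/user/Pictures/cat.jpg")
    col.media.writeData("scan.png", b"raw bytes", typeHint="image/png")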
@@ -90,8 +90,8 @@ def proto_replacement_list_to_native(


 class RustBackend:
-    def __init__(self, path: str):
-        self._backend = ankirspy.Backend(path)
+    def __init__(self, col_path: str, media_folder: str):
+        self._backend = ankirspy.Backend(col_path, media_folder)

     def _run_command(self, input: pb.BackendInput) -> pb.BackendOutput:
         input_bytes = input.SerializeToString()
@@ -181,3 +181,12 @@ class RustBackend:
         return self._run_command(
             pb.BackendInput(expand_clozes_to_reveal_latex=text)
         ).expand_clozes_to_reveal_latex
+
+    def add_file_to_media_folder(self, desired_name: str, data: bytes) -> str:
+        return self._run_command(
+            pb.BackendInput(
+                add_file_to_media_folder=pb.AddFileToMediaFolderIn(
+                    desired_name=desired_name, data=data
+                )
+            )
+        ).add_file_to_media_folder
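MediaManager.write_data() above resolves to exactly this wrapper: one BackendInput message in, one string out. Usage sketch, assuming an open collection col:

    stored = col.backend.add_file_to_media_folder("hello.jpg", b"hello")
    # "hello.jpg", unless that name already held different content, in which case
    # the backend returns whatever renamed filename it chose instead.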
@@ -11,6 +11,7 @@ from anki.collection import _Collection
 from anki.consts import *
 from anki.db import DB
 from anki.lang import _
+from anki.media import media_folder_from_col_path
 from anki.rsbackend import RustBackend
 from anki.stdmodels import (
     addBasicModel,
@@ -30,7 +31,7 @@ def Collection(
     path: str, lock: bool = True, server: Optional[ServerData] = None, log: bool = False
 ) -> _Collection:
     "Open a new or existing collection. Path must be unicode."
-    backend = RustBackend(path)
+    backend = RustBackend(path, media_folder_from_col_path(path))
     assert path.endswith(".anki2")
     path = os.path.abspath(path)
     create = not os.path.exists(path)
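media_folder_from_col_path() is the same .anki2 -> .media substitution MediaManager already performed internally; hoisting it into a helper lets Collection() hand the Python side and the Rust backend the same folder. A self-contained example (the helper body is copied from the hunk above, the sample paths are illustrative):

    import re

    def media_folder_from_col_path(col_path: str) -> str:
        return re.sub(r"(?i)\.(anki2)$", ".media", col_path)

    assert (
        media_folder_from_col_path("/home/user/Anki/User 1/collection.anki2")
        == "/home/user/Anki/User 1/collection.media"
    )
    assert media_folder_from_col_path("COLLECTION.ANKI2") == "COLLECTION.media"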
@@ -17,10 +17,10 @@ def test_add():
     assert d.media.addFile(path) == "foo.jpg"
     # adding the same file again should not create a duplicate
     assert d.media.addFile(path) == "foo.jpg"
-    # but if it has a different md5, it should
+    # but if it has a different sha1, it should
     with open(path, "w") as f:
         f.write("world")
-    assert d.media.addFile(path) == "foo (1).jpg"
+    assert d.media.addFile(path) == "foo-7c211433f02071597741e6ff5a8ea34789abbf43.jpg"


 def test_strings():
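The new expected filename is not arbitrary: checksum() in anki.utils is a SHA-1 hexdigest, and the suffix in the expected "foo-7c21....jpg" matches the digest of the conflicting contents ("world"). Appending a content hash is how name collisions are now resolved instead of appending " (1)". A quick check:

    import hashlib

    print(hashlib.sha1(b"world").hexdigest())
    # 7c211433f02071597741e6ff5a8ea34789abbf43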
@@ -793,8 +793,11 @@ to a cloze type first, via Edit>Change Note Type."""
             self.mw.progress.finish()
         # strip off any query string
         url = re.sub(r"\?.*?$", "", url)
-        path = urllib.parse.unquote(url)
-        return self.mw.col.media.writeData(path, filecontents, typeHint=ct)
+        fname = os.path.basename(urllib.parse.unquote(url))
+        if ct:
+            fname = self.mw.col.media.add_extension_based_on_mime(fname, ct)
+
+        return self.mw.col.media.write_data(fname, filecontents)

     # Paste/drag&drop
     ######################################################################
@@ -6,6 +6,7 @@ use crate::backend_proto::backend_input::Value;
 use crate::backend_proto::RenderedTemplateReplacement;
 use crate::cloze::expand_clozes_to_reveal_latex;
 use crate::err::{AnkiError, Result};
+use crate::media::add_data_to_folder_uniquely;
 use crate::sched::{local_minutes_west_for_stamp, sched_timing_today};
 use crate::template::{
     render_card, without_legacy_template_directives, FieldMap, FieldRequirements, ParsedTemplate,
@@ -18,7 +19,8 @@ use std::path::PathBuf;

 pub struct Backend {
     #[allow(dead_code)]
-    path: PathBuf,
+    col_path: PathBuf,
+    media_folder: PathBuf,
 }

 /// Convert an Anki error to a protobuf error.
@@ -26,10 +28,11 @@ impl std::convert::From<AnkiError> for pt::BackendError {
     fn from(err: AnkiError) -> Self {
         use pt::backend_error::Value as V;
         let value = match err {
-            AnkiError::InvalidInput { info } => V::InvalidInput(pt::InvalidInputError { info }),
+            AnkiError::InvalidInput { info } => V::InvalidInput(pt::StringError { info }),
             AnkiError::TemplateError { info, q_side } => {
                 V::TemplateParse(pt::TemplateParseError { info, q_side })
-            }
+            },
+            AnkiError::IOError { info } => V::IoError(pt::StringError { info }),
         };

         pt::BackendError { value: Some(value) }
@@ -44,8 +47,11 @@ impl std::convert::From<AnkiError> for pt::backend_output::Value {
 }

 impl Backend {
-    pub fn new<P: Into<PathBuf>>(path: P) -> Backend {
-        Backend { path: path.into() }
+    pub fn new<P: Into<PathBuf>>(col_path: P, media_folder: P) -> Backend {
+        Backend {
+            col_path: col_path.into(),
+            media_folder: media_folder.into(),
+        }
     }

     /// Decode a request, process it, and return the encoded result.
@@ -107,6 +113,9 @@ impl Backend {
             Value::ExpandClozesToRevealLatex(input) => {
                 OValue::ExpandClozesToRevealLatex(expand_clozes_to_reveal_latex(&input))
             }
+            Value::AddFileToMediaFolder(input) => {
+                OValue::AddFileToMediaFolder(self.add_file_to_media_folder(input)?)
+            }
         })
     }

@@ -219,6 +228,13 @@ impl Backend {
             av_tags: pt_tags,
         }
     }
+
+    fn add_file_to_media_folder(&self, input: pt::AddFileToMediaFolderIn) -> Result<String> {
+        Ok(
+            add_data_to_folder_uniquely(&self.media_folder, &input.desired_name, &input.data)?
+                .into(),
+        )
+    }
 }

 fn ords_hash_to_set(ords: HashSet<u16>) -> Vec<u32> {
@@ -2,6 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

 pub use failure::{Error, Fail};
+use std::io;

 pub type Result<T> = std::result::Result<T, AnkiError>;

@@ -12,6 +13,9 @@ pub enum AnkiError {

     #[fail(display = "invalid card template: {}", info)]
     TemplateError { info: String, q_side: bool },
+
+    #[fail(display = "I/O error: {}", info)]
+    IOError { info: String },
 }

 // error helpers
@@ -34,3 +38,11 @@ pub enum TemplateError {
         field: String,
     },
 }
+
+impl From<io::Error> for AnkiError {
+    fn from(err: io::Error) -> Self {
+        AnkiError::IOError {
+            info: format!("{:?}", err),
+        }
+    }
+}
@@ -16,10 +16,10 @@ fn buildhash() -> &'static str {
 #[pymethods]
 impl Backend {
     #[new]
-    fn init(obj: &PyRawObject, path: String) {
+    fn init(obj: &PyRawObject, col_path: String, media_folder: String) {
         obj.init({
             Backend {
-                backend: RustBackend::new(path),
+                backend: RustBackend::new(col_path, media_folder),
             }
         });
     }