Add missing close file descriptors using context managers

commit efb62b9528
parent 31ceb5d730

14 changed files with 67 additions and 29 deletions

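Every hunk below applies the same idiom: a bare open(...) whose descriptor was only released when the file object happened to be garbage-collected is replaced by a with-statement, which closes the descriptor deterministically even if the body raises. A minimal sketch of the before/after shape (the path is a placeholder, not taken from the diff):

    path = "example.bin"  # placeholder

    # Before: closing relies on garbage collection (prompt under CPython's
    # refcounting, but not guaranteed on other runtimes or when read() raises).
    data = open(path, "rb").read()

    # After: the context manager closes the descriptor on exit, raise or not.
    with open(path, "rb") as file:
        data = file.read()
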
@@ -56,7 +56,7 @@ class TextImporter(NoteImporter):
             log.append(_("Aborted: %s") % str(e))
         self.log = log
         self.ignored = ignored
-        self.fileobj.close()
+        self.close()
         return notes

     def open(self) -> None:

@@ -134,6 +134,20 @@ class TextImporter(NoteImporter):
         self.open()
         return self.numFields

+    def close(self):
+        if self.fileobj:
+            self.fileobj.close()
+            self.fileobj = None
+
+    def __del__(self):
+        self.close()
+        try:
+            super().__del__  # type: ignore
+        except AttributeError:
+            pass
+        else:
+            super().__del__(self)  # type: ignore
+
     def noteFromFields(self, fields: List[str]) -> ForeignNote:
         note = ForeignNote()
         note.fields.extend([x for x in fields])

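The importer hunk above pairs an idempotent close() with a __del__ fallback so the descriptor is released even if a caller forgets to clean up. A standalone sketch of that ownership pattern, with hypothetical names rather than the TextImporter code itself:

    class FileOwner:
        """Owns a file handle and guarantees it is eventually closed."""

        def __init__(self, path: str) -> None:
            self.fileobj = open(path, encoding="utf-8")

        def close(self) -> None:
            # Safe to call repeatedly; drop the handle after closing it.
            if self.fileobj:
                self.fileobj.close()
                self.fileobj = None

        def __del__(self) -> None:
            # Last-resort cleanup; explicit close() is still preferred,
            # since finalizers are not guaranteed to run promptly.
            self.close()

This is also why the first hunk switches foreignNotes() from self.fileobj.close() to self.close(): the explicit path and the finalizer now share one implementation.
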
@@ -300,7 +300,8 @@ class SupermemoXmlImporter(NoteImporter):

     # OPEN AND LOAD
     def openAnything(self, source):
-        "Open any source / actually only openig of files is used"
+        """Open any source / actually only opening of files is used
+        @return an open handle which must be closed after use, i.e., handle.close()"""

         if source == "-":
             return sys.stdin

@@ -138,7 +138,8 @@ package in the LaTeX header instead."""
         if call(latexCmd, stdout=log, stderr=log):
             return _errMsg(latexCmd[0], texpath)
         # add to media
-        data = open(png_or_svg, "rb").read()
+        with open(png_or_svg, "rb") as file:
+            data = file.read()
         col.media.write_data(extracted.filename, data)
         os.unlink(png_or_svg)
         return None

@@ -639,7 +639,8 @@ class FullSyncer(HttpSyncer):
         if cont == "upgradeRequired":
             hooks.sync_stage_did_change("upgradeRequired")
             return None
-        open(tpath, "wb").write(cont)
+        with open(tpath, "wb") as file:
+            file.write(cont)
         # check the received file is ok
         d = DB(tpath)
         assert d.scalar("pragma integrity_check") == "ok"

@@ -665,6 +666,7 @@ class FullSyncer(HttpSyncer):
             return False
         # apply some adjustments, then upload
         self.col.beforeUpload()
-        if self.req("upload", open(self.col.path, "rb")) != b"OK":
-            return False
+        with open(self.col.path, "rb") as file:
+            if self.req("upload", file) != b"OK":
+                return False
         return True

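In the upload hunk the open handle is passed to the request itself, so the request has to move inside the with block: the file must stay open until the server call has consumed it. A sketch of that shape, using a hypothetical send() callable in place of HttpSyncer.req():

    def upload_collection(path: str, send) -> bool:
        # send(fileobj) is assumed to stream the file and return the raw reply.
        with open(path, "rb") as file:
            if send(file) != b"OK":
                return False
        return True
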
@@ -151,11 +151,13 @@ def test_export_textnote():
     os.close(fd)
     os.unlink(f)
     e.exportInto(f)
-    assert open(f).readline() == "foo\tbar<br>\ttag tag2\n"
+    with open(f) as file:
+        assert file.readline() == "foo\tbar<br>\ttag tag2\n"
     e.includeTags = False
     e.includeHTML = False
     e.exportInto(f)
-    assert open(f).readline() == "foo\tbar\n"
+    with open(f) as file:
+        assert file.readline() == "foo\tbar\n"


 def test_exporters():

@@ -154,11 +154,14 @@ def update_file(path: str, hooks: List[Hook]):
     for hook in hooks:
         code += hook.code()

-    orig = open(path).read()
+    with open(path) as file:
+        orig = file.read()
+
     new = re.sub(
         "(?s)# @@AUTOGEN@@.*?# @@AUTOGEN@@\n",
         f"# @@AUTOGEN@@\n\n{code}# @@AUTOGEN@@\n",
         orig,
     )

-    open(path, "wb").write(new.encode("utf8"))
+    with open(path, "wb") as file:
+        file.write(new.encode("utf8"))

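For context, update_file() swaps out everything between two # @@AUTOGEN@@ markers; the (?s) flag lets . match newlines so the whole block is matched at once. A small self-contained demonstration of that substitution, with sample strings invented for illustration:

    import re

    orig = "header\n# @@AUTOGEN@@\nold hooks\n# @@AUTOGEN@@\nfooter\n"
    code = "def new_hook(): ...\n"

    new = re.sub(
        "(?s)# @@AUTOGEN@@.*?# @@AUTOGEN@@\n",
        f"# @@AUTOGEN@@\n\n{code}# @@AUTOGEN@@\n",
        orig,
    )
    # 'old hooks' is gone; the generated code now sits between the markers.
    print(new)
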
@@ -808,7 +808,8 @@ to a cloze type first, via Edit>Change Note Type."""
             req = urllib.request.Request(
                 url, None, {"User-Agent": "Mozilla/5.0 (compatible; Anki)"}
             )
-            filecontents = urllib.request.urlopen(req).read()
+            with urllib.request.urlopen(req) as response:
+                filecontents = response.read()
         else:
             reqs = HttpClient()
             reqs.timeout = 30

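urllib.request.urlopen() returns a response object that is itself a context manager, so the same pattern covers HTTP responses as well as local files. A minimal, self-contained example (the URL is a placeholder):

    import urllib.request

    req = urllib.request.Request(
        "https://example.com/image.png",  # placeholder URL
        None,
        {"User-Agent": "Mozilla/5.0 (compatible; Anki)"},
    )
    with urllib.request.urlopen(req) as response:
        filecontents = response.read()  # the connection is closed on exit
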
@@ -1129,7 +1130,8 @@ class EditorWebView(AnkiWebView):
         if not os.path.exists(path):
             return

-        data = open(path, "rb").read()
+        with open(path, "rb") as file:
+            data = file.read()
         fname = self.editor._addPastedImage(data, ext)
         if fname:
             return self.editor.fnameToLink(fname)

@@ -505,7 +505,8 @@ def _replaceWithApkg(mw, filename, backup):
         if os.path.exists(dest) and size == os.stat(dest).st_size:
             continue
         data = z.read(cStr)
-        open(dest, "wb").write(data)
+        with open(dest, "wb") as file:
+            file.write(data)

     z.close()

@@ -567,7 +567,8 @@ from the profile screen."
         self.data = data
         # create the file in calling thread to ensure the same
         # file is not created twice
-        open(self.path, "wb").close()
+        with open(self.path, "wb") as file:
+            pass

     def run(self):
         z = zipfile.ZipFile(self.path, "w", zipfile.ZIP_DEFLATED)

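This hunk only creates an empty placeholder file before the worker thread writes the real archive, so nothing happens inside the block; the with-statement simply replaces the explicit close(). A roughly equivalent sketch with a made-up path:

    # Create (or truncate) an empty marker file; the body is intentionally empty.
    with open("backup.colpkg.tmp", "wb"):
        pass
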
@@ -467,11 +467,12 @@ create table if not exists profiles
         "Create a new profile if none exists."
         self.create(_("User 1"))
         p = os.path.join(self.base, "README.txt")
-        open(p, "w", encoding="utf8").write(
-            without_unicode_isolation(
-                tr(TR.PROFILES_FOLDER_README, link=appHelpSite + "#startupopts")
-            )
-        )
+        with open(p, "w", encoding="utf8") as file:
+            file.write(
+                without_unicode_isolation(
+                    tr(TR.PROFILES_FOLDER_README, link=appHelpSite + "#startupopts")
+                )
+            )

     # Default language
     ######################################################################

@@ -544,7 +545,8 @@ create table if not exists profiles
         if not os.path.exists(path):
             return "software"

-        mode = open(path, "r").read().strip()
+        with open(path, "r") as file:
+            mode = file.read().strip()

         if mode == "angle" and isWin:
             return mode

@@ -553,7 +555,8 @@ create table if not exists profiles
         return "auto"

     def setGlMode(self, mode):
-        open(self._glPath(), "w").write(mode)
+        with open(self._glPath(), "w") as file:
+            file.write(mode)

     def nextGlMode(self):
         mode = self.glMode()

@@ -40,13 +40,16 @@ class AnkiWebPage(QWebEnginePage):  # type: ignore
         self._channel.registerObject("py", self._bridge)
         self.setWebChannel(self._channel)

-        js = QFile(":/qtwebchannel/qwebchannel.js")
-        assert js.open(QIODevice.ReadOnly)
-        js = bytes(js.readAll()).decode("utf-8")
+        qwebchannel = ":/qtwebchannel/qwebchannel.js"
+        jsfile = QFile(qwebchannel)
+        if not jsfile.open(QIODevice.ReadOnly):
+            print(f"Error opening '{qwebchannel}': {jsfile.error()}", file=sys.stderr)
+        jstext = bytes(jsfile.readAll()).decode("utf-8")
+        jsfile.close()

         script = QWebEngineScript()
         script.setSourceCode(
-            js
+            jstext
             + """
             var pycmd;
             new QWebChannel(qt.webChannelTransport, function(channel) {

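The AnkiWebPage hunk reads a Qt resource through QFile, which has no Python context manager, so the handle is closed explicitly and a failed open is reported instead of asserted. A standalone sketch of that pattern (assumes PyQt5; the helper name is invented):

    import sys
    from PyQt5.QtCore import QFile, QIODevice

    def read_qt_file(path: str) -> str:
        # Works for ordinary paths and Qt resource paths such as ":/...".
        f = QFile(path)
        if not f.open(QIODevice.ReadOnly):
            print(f"Error opening '{path}': {f.error()}", file=sys.stderr)
            return ""
        try:
            return bytes(f.readAll()).decode("utf-8")
        finally:
            f.close()
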
@@ -38,7 +38,8 @@ def dupe_key(fname, old, new):
     if not os.path.exists(fname):
         return

-    orig = open(fname).read()
+    with open(fname) as file:
+        orig = file.read()
     obj = parse(orig)
     for ent in obj.body:
         if isinstance(ent, Junk):

@@ -68,7 +69,8 @@ def dupe_key(fname, old, new):
             raise Exception(f"introduced junk! {fname} {ent}")

     # it's ok, write it out
-    open(fname, "w").write(modified)
+    with open(fname, "w") as file:
+        file.write(modified)


 i18ndir = os.path.join(os.path.dirname(ftl_filename), "..")

@@ -107,7 +107,8 @@ def plural_text(key, lang, translation):
 def add_simple_message(fname, key, message):
     orig = ""
     if os.path.exists(fname):
-        orig = open(fname).read()
+        with open(fname) as file:
+            orig = file.read()

     obj = parse(orig)
     for ent in obj.body:

|
@ -126,7 +127,8 @@ def add_simple_message(fname, key, message):
|
||||||
raise Exception(f"introduced junk! {fname} {ent}")
|
raise Exception(f"introduced junk! {fname} {ent}")
|
||||||
|
|
||||||
# it's ok, write it out
|
# it's ok, write it out
|
||||||
open(fname, "w").write(modified)
|
with open(fname, "w") as file:
|
||||||
|
file.write(modified)
|
||||||
|
|
||||||
|
|
||||||
def add_message(fname, key, translation):
|
def add_message(fname, key, translation):
|
||||||
|
|
|
@@ -55,5 +55,6 @@ for lang in folders:
             continue
         langs.setdefault(lang, {})[entry.msgid] = msgstr

-open("strings.json", "w").write(json.dumps(langs))
+with open("strings.json", "w") as file:
+    file.write(json.dumps(langs))
 print("wrote to strings.json")

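As a side note, the file object from the with block could also be handed to json.dump() directly, skipping the intermediate string; a small variation on the hunk above, not what the commit does:

    import json

    langs = {"en": {"sample-key": "sample value"}}  # illustrative data
    with open("strings.json", "w") as file:
        json.dump(langs, file)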