rename deck.s to a more understandable deck.db; keep s for compat

Damien Elmes 2011-02-19 16:27:28 +09:00
parent b9cf5ad85d
commit f828393de3
14 changed files with 459 additions and 468 deletions
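The commit message notes that the old s name is kept for compatibility. A minimal sketch of what such an alias could look like on the Deck object (a hypothetical illustration of the "keep s for compat" note, not the actual change in the suppressed deck.py diff):

    # Hypothetical compatibility alias: the DB/session wrapper is exposed as
    # deck.db, while deck.s keeps working for old callers and add-ons.
    class Deck(object):
        def __init__(self, db):
            self.db = db

        @property
        def s(self):
            # legacy name; same object as self.db
            return self.db

With an alias like this, deck.s.scalar(...) and deck.db.scalar(...) hit the same connection, so unconverted call sites behave the same as the renamed ones in this diff.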

File diff suppressed because it is too large


@ -49,10 +49,10 @@ class Exporter(object):
if self.limitCardIds: if self.limitCardIds:
return self.limitCardIds return self.limitCardIds
if not self.limitTags: if not self.limitTags:
cards = self.deck.s.column0("select id from cards") cards = self.deck.db.column0("select id from cards")
else: else:
d = tagIds(self.deck.s, self.limitTags, create=False) d = tagIds(self.deck.db, self.limitTags, create=False)
cards = self.deck.s.column0( cards = self.deck.db.column0(
"select cardId from cardTags where tagid in %s" % "select cardId from cardTags where tagid in %s" %
ids2str(d.values())) ids2str(d.values()))
self.count = len(cards) self.count = len(cards)
@ -84,7 +84,7 @@ class AnkiExporter(Exporter):
client.setServer(server) client.setServer(server)
client.localTime = self.deck.modified client.localTime = self.deck.modified
client.remoteTime = 0 client.remoteTime = 0
self.deck.s.flush() self.deck.db.flush()
# set up a custom change list and sync # set up a custom change list and sync
lsum = self.localSummary() lsum = self.localSummary()
rsum = server.summary(0) rsum = server.summary(0)
@ -94,9 +94,9 @@ class AnkiExporter(Exporter):
res = server.applyPayload(payload) res = server.applyPayload(payload)
if not self.includeSchedulingInfo: if not self.includeSchedulingInfo:
self.deck.updateProgress() self.deck.updateProgress()
self.newDeck.s.statement(""" self.newDeck.db.statement("""
delete from revlog""") delete from revlog""")
self.newDeck.s.statement(""" self.newDeck.db.statement("""
update cards set update cards set
interval = 0, interval = 0,
lastInterval = 0, lastInterval = 0,
@ -134,25 +134,25 @@ modified = :now
self.newDeck.rebuildCounts() self.newDeck.rebuildCounts()
self.exportedCards = self.newDeck.cardCount self.exportedCards = self.newDeck.cardCount
self.newDeck.utcOffset = -1 self.newDeck.utcOffset = -1
self.newDeck.s.commit() self.newDeck.db.commit()
self.newDeck.close() self.newDeck.close()
self.deck.finishProgress() self.deck.finishProgress()
def localSummary(self): def localSummary(self):
cardIds = self.cardIds() cardIds = self.cardIds()
cStrIds = ids2str(cardIds) cStrIds = ids2str(cardIds)
cards = self.deck.s.all(""" cards = self.deck.db.all("""
select id, modified from cards select id, modified from cards
where id in %s""" % cStrIds) where id in %s""" % cStrIds)
facts = self.deck.s.all(""" facts = self.deck.db.all("""
select facts.id, facts.modified from cards, facts where select facts.id, facts.modified from cards, facts where
facts.id = cards.factId and facts.id = cards.factId and
cards.id in %s""" % cStrIds) cards.id in %s""" % cStrIds)
models = self.deck.s.all(""" models = self.deck.db.all("""
select models.id, models.modified from models, facts where select models.id, models.modified from models, facts where
facts.modelId = models.id and facts.modelId = models.id and
facts.id in %s""" % ids2str([f[0] for f in facts])) facts.id in %s""" % ids2str([f[0] for f in facts]))
media = self.deck.s.all(""" media = self.deck.db.all("""
select id, created from media""") select id, created from media""")
return { return {
# cards # cards
@ -183,13 +183,13 @@ class TextCardExporter(Exporter):
strids = ids2str(ids) strids = ids2str(ids)
self.deck.startProgress((len(ids) + 1) / 50) self.deck.startProgress((len(ids) + 1) / 50)
self.deck.updateProgress(_("Exporting...")) self.deck.updateProgress(_("Exporting..."))
cards = self.deck.s.all(""" cards = self.deck.db.all("""
select cards.question, cards.answer, cards.id from cards select cards.question, cards.answer, cards.id from cards
where cards.id in %s where cards.id in %s
order by cards.created""" % strids) order by cards.created""" % strids)
self.deck.updateProgress() self.deck.updateProgress()
if self.includeTags: if self.includeTags:
self.cardTags = dict(self.deck.s.all(""" self.cardTags = dict(self.deck.db.all("""
select cards.id, facts.tags from cards, facts select cards.id, facts.tags from cards, facts
where cards.factId = facts.id where cards.factId = facts.id
and cards.id in %s and cards.id in %s
@ -222,7 +222,7 @@ class TextFactExporter(Exporter):
cardIds = self.cardIds() cardIds = self.cardIds()
self.deck.startProgress() self.deck.startProgress()
self.deck.updateProgress(_("Exporting...")) self.deck.updateProgress(_("Exporting..."))
facts = self.deck.s.all(""" facts = self.deck.db.all("""
select factId, value, facts.created from facts, fields select factId, value, facts.created from facts, fields
where where
facts.id in facts.id in
@ -233,7 +233,7 @@ order by factId, ordinal""" % ids2str(cardIds))
txt = "" txt = ""
self.deck.updateProgress() self.deck.updateProgress()
if self.includeTags: if self.includeTags:
self.factTags = dict(self.deck.s.all( self.factTags = dict(self.deck.db.all(
"select id, tags from facts where id in %s" % "select id, tags from facts where id in %s" %
ids2str([fact[0] for fact in facts]))) ids2str([fact[0] for fact in facts])))
groups = itertools.groupby(facts, itemgetter(0)) groups = itertools.groupby(facts, itemgetter(0))


@ -75,8 +75,8 @@ from cards c where relativeDelay = 1 and type >= 0 and interval > 21"""
young) young)
mature = self.deck._cardLimit("revActive", "revInactive", mature = self.deck._cardLimit("revActive", "revInactive",
mature) mature)
young = self.deck.s.all(young) young = self.deck.db.all(young)
mature = self.deck.s.all(mature) mature = self.deck.db.all(mature)
for (src, dest) in [(young, daysYoung), for (src, dest) in [(young, daysYoung),
(mature, daysMature)]: (mature, daysMature)]:
for (interval, due) in src: for (interval, due) in src:
@ -111,7 +111,7 @@ from cards c where relativeDelay = 1 and type >= 0 and interval > 21"""
*(int(x)for x in dr["day"].split("-")))).days, dr["reviewTime"]/60.0), dayReps)) *(int(x)for x in dr["day"].split("-")))).days, dr["reviewTime"]/60.0), dayReps))
def getDayReps(self): def getDayReps(self):
return self.deck.s.all(""" return self.deck.db.all("""
select select
count() as combinedNewReps, count() as combinedNewReps,
date(time-:off, "unixepoch") as day, date(time-:off, "unixepoch") as day,
@ -238,7 +238,7 @@ group by day order by day
days = {} days = {}
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
limit = self.endOfDay - (numdays) * 86400 limit = self.endOfDay - (numdays) * 86400
res = self.deck.s.column0("select %s from cards where %s >= %f" % res = self.deck.db.column0("select %s from cards where %s >= %f" %
(attr, attr, limit)) (attr, attr, limit))
for r in res: for r in res:
d = int((r - self.endOfDay) / 86400.0) d = int((r - self.endOfDay) / 86400.0)
@ -361,7 +361,7 @@ group by day order by day
arr = [0] * arrsize arr = [0] * arrsize
colours = [easesNewC, easesYoungC, easesMatureC] colours = [easesNewC, easesYoungC, easesMatureC]
bars = [] bars = []
eases = self.deck.s.all(""" eases = self.deck.db.all("""
select (case when rep = 1 then 0 when lastInterval <= 21 then 1 else 2 end) select (case when rep = 1 then 0 when lastInterval <= 21 then 1 else 2 end)
as type, ease, count() from revlog group by type, ease""") as type, ease, count() from revlog group by type, ease""")
d = {} d = {}


@ -76,7 +76,7 @@ class Importer(object):
cards = self.foreignCards() cards = self.foreignCards()
# grab data from db # grab data from db
self.deck.updateProgress() self.deck.updateProgress()
fields = self.deck.s.all(""" fields = self.deck.db.all("""
select factId, value from fields where fieldModelId = :id select factId, value from fields where fieldModelId = :id
and value != ''""", and value != ''""",
id=self.updateKey[1]) id=self.updateKey[1])
@ -123,7 +123,7 @@ and value != ''""",
'v': c.fields[index], 'v': c.fields[index],
'chk': self.maybeChecksum(c.fields[index], fm.unique)} 'chk': self.maybeChecksum(c.fields[index], fm.unique)}
for (fid, c) in upcards] for (fid, c) in upcards]
self.deck.s.execute(""" self.deck.db.execute("""
update fields set value = :v, chksum = :chk where factId = :fid update fields set value = :v, chksum = :chk where factId = :fid
and fieldModelId = :fmid""", data) and fieldModelId = :fmid""", data)
# update tags # update tags
@ -132,12 +132,12 @@ and fieldModelId = :fmid""", data)
data = [{'fid': fid, data = [{'fid': fid,
't': c.fields[tagsIdx]} 't': c.fields[tagsIdx]}
for (fid, c) in upcards] for (fid, c) in upcards]
self.deck.s.execute( self.deck.db.execute(
"update facts set tags = :t where id = :fid", "update facts set tags = :t where id = :fid",
data) data)
# rebuild caches # rebuild caches
self.deck.updateProgress() self.deck.updateProgress()
cids = self.deck.s.column0( cids = self.deck.db.column0(
"select id from cards where factId in %s" % "select id from cards where factId in %s" %
ids2str(fids)) ids2str(fids))
self.deck.updateCardTags(cids) self.deck.updateCardTags(cids)
@ -238,12 +238,12 @@ The current importer only supports a single active card template. Please disable
d['created'] = tmp[0] d['created'] = tmp[0]
factCreated[d['id']] = d['created'] factCreated[d['id']] = d['created']
return d return d
self.deck.s.execute(factsTable.insert(), self.deck.db.execute(factsTable.insert(),
[fudgeCreated({'modelId': self.model.id, [fudgeCreated({'modelId': self.model.id,
'tags': canonifyTags(self.tagsToAdd + " " + cards[n].tags), 'tags': canonifyTags(self.tagsToAdd + " " + cards[n].tags),
'id': factIds[n]}) for n in range(len(cards))]) 'id': factIds[n]}) for n in range(len(cards))])
self.deck.factCount += len(factIds) self.deck.factCount += len(factIds)
self.deck.s.execute(""" self.deck.db.execute("""
delete from factsDeleted delete from factsDeleted
where factId in (%s)""" % ",".join([str(s) for s in factIds])) where factId in (%s)""" % ",".join([str(s) for s in factIds]))
# add all the fields # add all the fields
@ -264,7 +264,7 @@ where factId in (%s)""" % ",".join([str(s) for s in factIds]))
cards[m].fields[index] or u"", fm.unique) cards[m].fields[index] or u"", fm.unique)
} }
for m in range(len(cards))] for m in range(len(cards))]
self.deck.s.execute(fieldsTable.insert(), self.deck.db.execute(fieldsTable.insert(),
data) data)
# and cards # and cards
self.deck.updateProgress() self.deck.updateProgress()
@ -281,7 +281,7 @@ where factId in (%s)""" % ",".join([str(s) for s in factIds]))
'question': u"", 'question': u"",
'answer': u"" 'answer': u""
},cards[m]) for m in range(len(cards))] },cards[m]) for m in range(len(cards))]
self.deck.s.execute(cardsTable.insert(), self.deck.db.execute(cardsTable.insert(),
data) data)
self.deck.updateProgress() self.deck.updateProgress()
self.deck.updateCardsFromFactIds(factIds) self.deck.updateCardsFromFactIds(factIds)
@ -334,7 +334,7 @@ where factId in (%s)""" % ",".join([str(s) for s in factIds]))
def getUniqueCache(self, field): def getUniqueCache(self, field):
"Return a dict with all fields, to test for uniqueness." "Return a dict with all fields, to test for uniqueness."
return dict(self.deck.s.all( return dict(self.deck.db.all(
"select value, 1 from fields where fieldModelId = :fmid", "select value, 1 from fields where fieldModelId = :fmid",
fmid=field.id)) fmid=field.id))


@ -33,7 +33,7 @@ class Anki10Importer(Importer):
src.s.execute("update models set modified = 1") src.s.execute("update models set modified = 1")
src.s.execute("update cards set modified = 1") src.s.execute("update cards set modified = 1")
src.s.execute("update media set created = 1") src.s.execute("update media set created = 1")
self.deck.s.flush() self.deck.db.flush()
# set up a custom change list and sync # set up a custom change list and sync
lsum = client.summary(0) lsum = client.summary(0)
self._clearDeleted(lsum) self._clearDeleted(lsum)
@ -57,13 +57,13 @@ class Anki10Importer(Importer):
fids = [f[0] for f in res['added-facts']['facts']] fids = [f[0] for f in res['added-facts']['facts']]
self.deck.addTags(fids, self.tagsToAdd) self.deck.addTags(fids, self.tagsToAdd)
# mark import material as newly added # mark import material as newly added
self.deck.s.statement( self.deck.db.statement(
"update cards set modified = :t where id in %s" % "update cards set modified = :t where id in %s" %
ids2str([x[0] for x in res['added-cards']]), t=time.time()) ids2str([x[0] for x in res['added-cards']]), t=time.time())
self.deck.s.statement( self.deck.db.statement(
"update facts set modified = :t where id in %s" % "update facts set modified = :t where id in %s" %
ids2str([x[0] for x in res['added-facts']['facts']]), t=time.time()) ids2str([x[0] for x in res['added-facts']['facts']]), t=time.time())
self.deck.s.statement( self.deck.db.statement(
"update models set modified = :t where id in %s" % "update models set modified = :t where id in %s" %
ids2str([x['id'] for x in res['added-models']]), t=time.time()) ids2str([x['id'] for x in res['added-models']]), t=time.time())
# update total and refresh # update total and refresh


@ -218,7 +218,7 @@ if __name__ == '__main__':
i = DingsBumsImporter(mydeck, filename) i = DingsBumsImporter(mydeck, filename)
i.doImport() i.doImport()
assert 7 == i.total assert 7 == i.total
mydeck.s.close() mydeck.db.close()
print "... Finished" print "... Finished"
sys.exit(1) sys.exit(1)


@ -50,7 +50,7 @@ If a file with the same md5sum exists in the DB, return that.
If a file with the same name exists, return a unique name. If a file with the same name exists, return a unique name.
This does not modify the media table.""" This does not modify the media table."""
# see if have duplicate contents # see if have duplicate contents
newpath = deck.s.scalar( newpath = deck.db.scalar(
"select filename from media where originalPath = :cs", "select filename from media where originalPath = :cs",
cs=checksum(open(path, "rb").read())) cs=checksum(open(path, "rb").read()))
# check if this filename already exists # check if this filename already exists
@ -85,9 +85,9 @@ def uniquePath(dir, base):
def updateMediaCount(deck, file, count=1): def updateMediaCount(deck, file, count=1):
mdir = deck.mediaDir() mdir = deck.mediaDir()
if deck.s.scalar( if deck.db.scalar(
"select 1 from media where filename = :file", file=file): "select 1 from media where filename = :file", file=file):
deck.s.statement( deck.db.statement(
"update media set size = size + :c, created = :t where filename = :file", "update media set size = size + :c, created = :t where filename = :file",
file=file, c=count, t=time.time()) file=file, c=count, t=time.time())
elif count > 0: elif count > 0:
@ -96,18 +96,18 @@ def updateMediaCount(deck, file, count=1):
checksum(open(os.path.join(mdir, file), "rb").read())) checksum(open(os.path.join(mdir, file), "rb").read()))
except: except:
sum = u"" sum = u""
deck.s.statement(""" deck.db.statement("""
insert into media (id, filename, size, created, originalPath, description) insert into media (id, filename, size, created, originalPath, description)
values (:id, :file, :c, :mod, :sum, '')""", values (:id, :file, :c, :mod, :sum, '')""",
id=genID(), file=file, c=count, mod=time.time(), id=genID(), file=file, c=count, mod=time.time(),
sum=sum) sum=sum)
def removeUnusedMedia(deck): def removeUnusedMedia(deck):
ids = deck.s.column0("select id from media where size = 0") ids = deck.db.column0("select id from media where size = 0")
for id in ids: for id in ids:
deck.s.statement("insert into mediaDeleted values (:id, :t)", deck.db.statement("insert into mediaDeleted values (:id, :t)",
id=id, t=time.time()) id=id, t=time.time())
deck.s.statement("delete from media where size = 0") deck.db.statement("delete from media where size = 0")
# String manipulation # String manipulation
########################################################################## ##########################################################################
@ -147,7 +147,7 @@ def rebuildMediaDir(deck, delete=False, dirty=True):
return (0, 0) return (0, 0)
deck.startProgress(title=_("Check Media DB")) deck.startProgress(title=_("Check Media DB"))
# set all ref counts to 0 # set all ref counts to 0
deck.s.statement("update media set size = 0") deck.db.statement("update media set size = 0")
# look through cards for media references # look through cards for media references
refs = {} refs = {}
normrefs = {} normrefs = {}
@ -155,7 +155,7 @@ def rebuildMediaDir(deck, delete=False, dirty=True):
if isinstance(s, unicode): if isinstance(s, unicode):
return unicodedata.normalize('NFD', s) return unicodedata.normalize('NFD', s)
return s return s
for (question, answer) in deck.s.all( for (question, answer) in deck.db.all(
"select question, answer from cards"): "select question, answer from cards"):
for txt in (question, answer): for txt in (question, answer):
for f in mediaFiles(txt): for f in mediaFiles(txt):
@ -186,7 +186,7 @@ def rebuildMediaDir(deck, delete=False, dirty=True):
removeUnusedMedia(deck) removeUnusedMedia(deck)
# check md5s are up to date # check md5s are up to date
update = [] update = []
for (file, created, md5) in deck.s.all( for (file, created, md5) in deck.db.all(
"select filename, created, originalPath from media"): "select filename, created, originalPath from media"):
path = os.path.join(mdir, file) path = os.path.join(mdir, file)
if not os.path.exists(path): if not os.path.exists(path):
@ -198,13 +198,13 @@ def rebuildMediaDir(deck, delete=False, dirty=True):
if md5 != sum: if md5 != sum:
update.append({'f':file, 'sum':sum, 'c':time.time()}) update.append({'f':file, 'sum':sum, 'c':time.time()})
if update: if update:
deck.s.statements(""" deck.db.statements("""
update media set originalPath = :sum, created = :c where filename = :f""", update media set originalPath = :sum, created = :c where filename = :f""",
update) update)
# update deck and get return info # update deck and get return info
if dirty: if dirty:
deck.flushMod() deck.flushMod()
nohave = deck.s.column0("select filename from media where originalPath = ''") nohave = deck.db.column0("select filename from media where originalPath = ''")
deck.finishProgress() deck.finishProgress()
return (nohave, unused) return (nohave, unused)
@ -219,7 +219,7 @@ def downloadMissing(deck):
deck.startProgress() deck.startProgress()
missing = 0 missing = 0
grabbed = 0 grabbed = 0
for c, (f, sum) in enumerate(deck.s.all( for c, (f, sum) in enumerate(deck.db.all(
"select filename, originalPath from media")): "select filename, originalPath from media")):
path = os.path.join(mdir, f) path = os.path.join(mdir, f)
if not os.path.exists(path): if not os.path.exists(path):
@ -247,7 +247,7 @@ def downloadRemote(deck):
mdir = deck.mediaDir(create=True) mdir = deck.mediaDir(create=True)
refs = {} refs = {}
deck.startProgress() deck.startProgress()
for (question, answer) in deck.s.all( for (question, answer) in deck.db.all(
"select question, answer from cards"): "select question, answer from cards"):
for txt in (question, answer): for txt in (question, answer):
for f in mediaFiles(txt, remote=True): for f in mediaFiles(txt, remote=True):
@ -267,7 +267,7 @@ def downloadRemote(deck):
failed.append(link) failed.append(link)
deck.updateProgress(label=_("Download %d...") % c) deck.updateProgress(label=_("Download %d...") % c)
for (url, name) in passed: for (url, name) in passed:
deck.s.statement( deck.db.statement(
"update fields set value = replace(value, :url, :name)", "update fields set value = replace(value, :url, :name)",
url=url, name=name) url=url, name=name)
deck.updateProgress(label=_("Updating references...")) deck.updateProgress(label=_("Updating references..."))


@ -239,8 +239,8 @@ class DeckStats(object):
if not test: if not test:
test = "lastInterval > 21" test = "lastInterval > 21"
head = "select count() from revlog where %s" head = "select count() from revlog where %s"
all = self.deck.s.scalar(head % test) all = self.deck.db.scalar(head % test)
yes = self.deck.s.scalar((head % test) + " and ease > 1") yes = self.deck.db.scalar((head % test) + " and ease > 1")
return (all, yes, yes/float(all)*100) return (all, yes, yes/float(all)*100)
def getYoungCorrect(self): def getYoungCorrect(self):
@ -253,7 +253,7 @@ class DeckStats(object):
today = self.deck.failedCutoff today = self.deck.failedCutoff
x = today + 86400*start x = today + 86400*start
y = today + 86400*finish y = today + 86400*finish
return self.deck.s.scalar(""" return self.deck.db.scalar("""
select count(distinct(cast((time-:off)/86400 as integer))) from revlog select count(distinct(cast((time-:off)/86400 as integer))) from revlog
where time >= :x and time <= :y""",x=x,y=y, off=self.deck.utcOffset) where time >= :x and time <= :y""",x=x,y=y, off=self.deck.utcOffset)
@ -261,12 +261,12 @@ where time >= :x and time <= :y""",x=x,y=y, off=self.deck.utcOffset)
now = datetime.datetime.today() now = datetime.datetime.today()
x = time.mktime((now + datetime.timedelta(start)).timetuple()) x = time.mktime((now + datetime.timedelta(start)).timetuple())
y = time.mktime((now + datetime.timedelta(finish)).timetuple()) y = time.mktime((now + datetime.timedelta(finish)).timetuple())
return self.deck.s.scalar( return self.deck.db.scalar(
"select count() from revlog where time >= :x and time <= :y", "select count() from revlog where time >= :x and time <= :y",
x=x, y=y) x=x, y=y)
def getAverageInterval(self): def getAverageInterval(self):
return self.deck.s.scalar( return self.deck.db.scalar(
"select sum(interval) / count(interval) from cards " "select sum(interval) / count(interval) from cards "
"where cards.reps > 0") or 0 "where cards.reps > 0") or 0
@ -305,32 +305,32 @@ where time >= :x and time <= :y""",x=x,y=y, off=self.deck.utcOffset)
return (time.time() - self.deck.created) / 86400.0 return (time.time() - self.deck.created) / 86400.0
def getSumInverseRoundInterval(self): def getSumInverseRoundInterval(self):
return self.deck.s.scalar( return self.deck.db.scalar(
"select sum(1/round(max(interval, 1)+0.5)) from cards " "select sum(1/round(max(interval, 1)+0.5)) from cards "
"where cards.reps > 0 " "where cards.reps > 0 "
"and type >= 0") or 0 "and type >= 0") or 0
def getWorkloadPeriod(self, period): def getWorkloadPeriod(self, period):
cutoff = time.time() + 86400 * period cutoff = time.time() + 86400 * period
return (self.deck.s.scalar(""" return (self.deck.db.scalar("""
select count(id) from cards select count(id) from cards
where combinedDue < :cutoff where combinedDue < :cutoff
and type >= 0 and relativeDelay in (0,1)""", cutoff=cutoff) or 0) / float(period) and type >= 0 and relativeDelay in (0,1)""", cutoff=cutoff) or 0) / float(period)
def getPastWorkloadPeriod(self, period): def getPastWorkloadPeriod(self, period):
cutoff = time.time() - 86400 * period cutoff = time.time() - 86400 * period
return (self.deck.s.scalar(""" return (self.deck.db.scalar("""
select count(*) from revlog select count(*) from revlog
where time > :cutoff""", cutoff=cutoff) or 0) / float(period) where time > :cutoff""", cutoff=cutoff) or 0) / float(period)
def getNewPeriod(self, period): def getNewPeriod(self, period):
cutoff = time.time() - 86400 * period cutoff = time.time() - 86400 * period
return (self.deck.s.scalar(""" return (self.deck.db.scalar("""
select count(id) from cards select count(id) from cards
where created > :cutoff""", cutoff=cutoff) or 0) where created > :cutoff""", cutoff=cutoff) or 0)
def getFirstPeriod(self, period): def getFirstPeriod(self, period):
cutoff = time.time() - 86400 * period cutoff = time.time() - 86400 * period
return (self.deck.s.scalar(""" return (self.deck.db.scalar("""
select count(*) from revlog select count(*) from revlog
where rep = 1 and time > :cutoff""", cutoff=cutoff) or 0) where rep = 1 and time > :cutoff""", cutoff=cutoff) or 0)


@ -175,14 +175,14 @@ class SyncTools(object):
"Facts missing after sync. Please run Tools>Advanced>Check DB.") "Facts missing after sync. Please run Tools>Advanced>Check DB.")
def missingFacts(self): def missingFacts(self):
return self.deck.s.scalar( return self.deck.db.scalar(
"select count() from cards where factId "+ "select count() from cards where factId "+
"not in (select id from facts)"); "not in (select id from facts)");
def postSyncRefresh(self): def postSyncRefresh(self):
"Flush changes to DB, and reload object associations." "Flush changes to DB, and reload object associations."
self.deck.s.flush() self.deck.db.flush()
self.deck.s.refresh(self.deck) self.deck.db.refresh(self.deck)
self.deck.currentModel self.deck.currentModel
# Summaries # Summaries
@ -225,7 +225,7 @@ class SyncTools(object):
for (key, sql) in cats: for (key, sql) in cats:
if self.fullThreshold: if self.fullThreshold:
sql += " limit %d" % self.fullThreshold sql += " limit %d" % self.fullThreshold
ret = self.deck.s.all(sql, m=lastSync) ret = self.deck.db.all(sql, m=lastSync)
if self.fullThreshold and self.fullThreshold == len(ret): if self.fullThreshold and self.fullThreshold == len(ret):
# threshold exceeded, abort early # threshold exceeded, abort early
return None return None
@ -303,7 +303,7 @@ class SyncTools(object):
def bundleModel(self, id, updateModified): def bundleModel(self, id, updateModified):
"Return a model representation suitable for transport." "Return a model representation suitable for transport."
mod = self.deck.s.query(Model).get(id) mod = self.deck.db.query(Model).get(id)
# force load of lazy attributes # force load of lazy attributes
mod.fieldModels; mod.cardModels mod.fieldModels; mod.cardModels
m = self.dictFromObj(mod) m = self.dictFromObj(mod)
@ -332,7 +332,7 @@ class SyncTools(object):
self.applyDict(local, model) self.applyDict(local, model)
self.mergeFieldModels(local, fms) self.mergeFieldModels(local, fms)
self.mergeCardModels(local, cms) self.mergeCardModels(local, cms)
self.deck.s.statement( self.deck.db.statement(
"delete from modelsDeleted where modelId in %s" % "delete from modelsDeleted where modelId in %s" %
ids2str([m['id'] for m in models])) ids2str([m['id'] for m in models]))
@ -404,10 +404,10 @@ class SyncTools(object):
modified = "modified" modified = "modified"
factIds = ids2str(ids) factIds = ids2str(ids)
return { return {
'facts': self.realLists(self.deck.s.all(""" 'facts': self.realLists(self.deck.db.all("""
select id, modelId, created, %s, tags, spaceUntil, lastCardId from facts select id, modelId, created, %s, tags, spaceUntil, lastCardId from facts
where id in %s""" % (modified, factIds))), where id in %s""" % (modified, factIds))),
'fields': self.realLists(self.deck.s.all(""" 'fields': self.realLists(self.deck.db.all("""
select id, factId, fieldModelId, ordinal, value, chksum from fields select id, factId, fieldModelId, ordinal, value, chksum from fields
where factId in %s""" % factIds)) where factId in %s""" % factIds))
} }
@ -427,7 +427,7 @@ where factId in %s""" % factIds))
'spaceUntil': f[5] or "", 'spaceUntil': f[5] or "",
'lastCardId': f[6] 'lastCardId': f[6]
} for f in facts] } for f in facts]
self.deck.s.execute(""" self.deck.db.execute("""
insert or replace into facts insert or replace into facts
(id, modelId, created, modified, tags, spaceUntil, lastCardId) (id, modelId, created, modified, tags, spaceUntil, lastCardId)
values values
@ -446,16 +446,16 @@ values
'chksum': f[5] 'chksum': f[5]
} for f in fields] } for f in fields]
# delete local fields since ids may have changed # delete local fields since ids may have changed
self.deck.s.execute( self.deck.db.execute(
"delete from fields where factId in %s" % "delete from fields where factId in %s" %
ids2str([f[0] for f in facts])) ids2str([f[0] for f in facts]))
# then update # then update
self.deck.s.execute(""" self.deck.db.execute("""
insert into fields insert into fields
(id, factId, fieldModelId, ordinal, value, chksum) (id, factId, fieldModelId, ordinal, value, chksum)
values values
(:id, :factId, :fieldModelId, :ordinal, :value, :chksum)""", dlist) (:id, :factId, :fieldModelId, :ordinal, :value, :chksum)""", dlist)
self.deck.s.statement( self.deck.db.statement(
"delete from factsDeleted where factId in %s" % "delete from factsDeleted where factId in %s" %
ids2str([f[0] for f in facts])) ids2str([f[0] for f in facts]))
@ -466,7 +466,7 @@ values
########################################################################## ##########################################################################
def getCards(self, ids): def getCards(self, ids):
return self.realLists(self.deck.s.all(""" return self.realLists(self.deck.db.all("""
select id, factId, cardModelId, created, modified, tags, ordinal, select id, factId, cardModelId, created, modified, tags, ordinal,
priority, interval, lastInterval, due, lastDue, factor, priority, interval, lastInterval, due, lastDue, factor,
firstAnswered, reps, successive, averageTime, reviewTime, youngEase0, firstAnswered, reps, successive, averageTime, reviewTime, youngEase0,
@ -516,7 +516,7 @@ from cards where id in %s""" % ids2str(ids)))
'combinedDue': c[35], 'combinedDue': c[35],
'rd': c[36], 'rd': c[36],
} for c in cards] } for c in cards]
self.deck.s.execute(""" self.deck.db.execute("""
insert or replace into cards insert or replace into cards
(id, factId, cardModelId, created, modified, tags, ordinal, (id, factId, cardModelId, created, modified, tags, ordinal,
priority, interval, lastInterval, due, lastDue, factor, priority, interval, lastInterval, due, lastDue, factor,
@ -533,7 +533,7 @@ values
:matureEase1, :matureEase2, :matureEase3, :matureEase4, :yesCount, :matureEase1, :matureEase2, :matureEase3, :matureEase4, :yesCount,
:noCount, :question, :answer, :lastFactor, :spaceUntil, :noCount, :question, :answer, :lastFactor, :spaceUntil,
:type, :combinedDue, :rd, 0)""", dlist) :type, :combinedDue, :rd, 0)""", dlist)
self.deck.s.statement( self.deck.db.statement(
"delete from cardsDeleted where cardId in %s" % "delete from cardsDeleted where cardId in %s" %
ids2str([c[0] for c in cards])) ids2str([c[0] for c in cards]))
@ -551,39 +551,30 @@ values
# and ensure lastSync is greater than modified # and ensure lastSync is greater than modified
self.deck.lastSync = max(time.time(), self.deck.modified+1) self.deck.lastSync = max(time.time(), self.deck.modified+1)
d = self.dictFromObj(self.deck) d = self.dictFromObj(self.deck)
del d['Session'] for bad in ("Session", "engine", "s", "db", "path", "syncName",
del d['engine'] "version", "newQueue", "failedQueue", "revQueue",
del d['s'] "css", "models", "currentModel"):
del d['path'] if bad in d:
del d['syncName'] del d[bad]
del d['version']
if 'newQueue' in d:
del d['newQueue']
del d['failedQueue']
del d['revQueue']
# these may be deleted before bundling
if 'css' in d: del d['css']
if 'models' in d: del d['models']
if 'currentModel' in d: del d['currentModel']
keys = d.keys() keys = d.keys()
for k in keys: for k in keys:
if isinstance(d[k], types.MethodType): if isinstance(d[k], types.MethodType):
del d[k] del d[k]
d['meta'] = self.realLists(self.deck.s.all("select * from deckVars")) d['meta'] = self.realLists(self.deck.db.all("select * from deckVars"))
return d return d
def updateDeck(self, deck): def updateDeck(self, deck):
if 'meta' in deck: if 'meta' in deck:
meta = deck['meta'] meta = deck['meta']
for (k,v) in meta: for (k,v) in meta:
self.deck.s.statement(""" self.deck.db.statement("""
insert or replace into deckVars insert or replace into deckVars
(key, value) values (:k, :v)""", k=k, v=v) (key, value) values (:k, :v)""", k=k, v=v)
del deck['meta'] del deck['meta']
self.applyDict(self.deck, deck) self.applyDict(self.deck, deck)
def bundleHistory(self): def bundleHistory(self):
return self.realLists(self.deck.s.all(""" return self.realLists(self.deck.db.all("""
select * from revlog where time > :ls""", select * from revlog where time > :ls""",
ls=self.deck.lastSync)) ls=self.deck.lastSync))
@ -599,18 +590,18 @@ select * from revlog where time > :ls""",
'flags': h[8]} for h in history] 'flags': h[8]} for h in history]
if not dlist: if not dlist:
return return
self.deck.s.statements(""" self.deck.db.statements("""
insert or ignore into revlog values insert or ignore into revlog values
(:time, :cardId, :ease, :rep, :lastInterval, :interval, :factor, (:time, :cardId, :ease, :rep, :lastInterval, :interval, :factor,
:userTime, :flags)""", :userTime, :flags)""",
dlist) dlist)
def bundleSources(self): def bundleSources(self):
return self.realLists(self.deck.s.all("select * from sources")) return self.realLists(self.deck.db.all("select * from sources"))
def updateSources(self, sources): def updateSources(self, sources):
for s in sources: for s in sources:
self.deck.s.statement(""" self.deck.db.statement("""
insert or replace into sources values insert or replace into sources values
(:id, :name, :created, :lastSync, :syncPeriod)""", (:id, :name, :created, :lastSync, :syncPeriod)""",
id=s[0], id=s[0],
@ -623,7 +614,7 @@ insert or replace into sources values
########################################################################## ##########################################################################
def getMedia(self, ids): def getMedia(self, ids):
return [tuple(row) for row in self.deck.s.all(""" return [tuple(row) for row in self.deck.db.all("""
select id, filename, size, created, originalPath, description select id, filename, size, created, originalPath, description
from media where id in %s""" % ids2str(ids))] from media where id in %s""" % ids2str(ids))]
@ -640,24 +631,24 @@ from media where id in %s""" % ids2str(ids))]
'description': m[5]}) 'description': m[5]})
# apply metadata # apply metadata
if meta: if meta:
self.deck.s.statements(""" self.deck.db.statements("""
insert or replace into media (id, filename, size, created, insert or replace into media (id, filename, size, created,
originalPath, description) originalPath, description)
values (:id, :filename, :size, :created, :originalPath, values (:id, :filename, :size, :created, :originalPath,
:description)""", meta) :description)""", meta)
self.deck.s.statement( self.deck.db.statement(
"delete from mediaDeleted where mediaId in %s" % "delete from mediaDeleted where mediaId in %s" %
ids2str([m[0] for m in media])) ids2str([m[0] for m in media]))
def deleteMedia(self, ids): def deleteMedia(self, ids):
sids = ids2str(ids) sids = ids2str(ids)
files = self.deck.s.column0( files = self.deck.db.column0(
"select filename from media where id in %s" % sids) "select filename from media where id in %s" % sids)
self.deck.s.statement(""" self.deck.db.statement("""
insert into mediaDeleted insert into mediaDeleted
select id, :now from media select id, :now from media
where media.id in %s""" % sids, now=time.time()) where media.id in %s""" % sids, now=time.time())
self.deck.s.execute( self.deck.db.execute(
"delete from media where id in %s" % sids) "delete from media where id in %s" % sids)
# One-way syncing (sharing) # One-way syncing (sharing)
@ -824,7 +815,7 @@ and cards.id in %s""" % ids2str([c[0] for c in cards])))
for l in sum.values(): for l in sum.values():
if len(l) > 1000: if len(l) > 1000:
return True return True
if self.deck.s.scalar( if self.deck.db.scalar(
"select count() from revlog where time > :ls", "select count() from revlog where time > :ls",
ls=self.deck.lastSync) > 1000: ls=self.deck.lastSync) > 1000:
return True return True
@ -835,7 +826,7 @@ and cards.id in %s""" % ids2str([c[0] for c in cards])))
t = time.time() t = time.time()
# ensure modified is not greater than server time # ensure modified is not greater than server time
self.deck.modified = min(self.deck.modified, self.server.timestamp) self.deck.modified = min(self.deck.modified, self.server.timestamp)
self.deck.s.commit() self.deck.db.commit()
self.deck.close() self.deck.close()
fields = { fields = {
"p": self.server.password, "p": self.server.password,


@ -23,56 +23,56 @@ def upgradeSchema(s):
def updateIndices(deck): def updateIndices(deck):
"Add indices to the DB." "Add indices to the DB."
# counts, failed cards # counts, failed cards
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_cards_typeCombined on cards create index if not exists ix_cards_typeCombined on cards
(type, combinedDue, factId)""") (type, combinedDue, factId)""")
# scheduler-agnostic type # scheduler-agnostic type
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_cards_relativeDelay on cards create index if not exists ix_cards_relativeDelay on cards
(relativeDelay)""") (relativeDelay)""")
# index on modified, to speed up sync summaries # index on modified, to speed up sync summaries
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_cards_modified on cards create index if not exists ix_cards_modified on cards
(modified)""") (modified)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_facts_modified on facts create index if not exists ix_facts_modified on facts
(modified)""") (modified)""")
# card spacing # card spacing
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_cards_factId on cards (factId)""") create index if not exists ix_cards_factId on cards (factId)""")
# fields # fields
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_fields_factId on fields (factId)""") create index if not exists ix_fields_factId on fields (factId)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_fields_fieldModelId on fields (fieldModelId)""") create index if not exists ix_fields_fieldModelId on fields (fieldModelId)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_fields_chksum on fields (chksum)""") create index if not exists ix_fields_chksum on fields (chksum)""")
# media # media
deck.s.statement(""" deck.db.statement("""
create unique index if not exists ix_media_filename on media (filename)""") create unique index if not exists ix_media_filename on media (filename)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_media_originalPath on media (originalPath)""") create index if not exists ix_media_originalPath on media (originalPath)""")
# deletion tracking # deletion tracking
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_cardsDeleted_cardId on cardsDeleted (cardId)""") create index if not exists ix_cardsDeleted_cardId on cardsDeleted (cardId)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_modelsDeleted_modelId on modelsDeleted (modelId)""") create index if not exists ix_modelsDeleted_modelId on modelsDeleted (modelId)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_factsDeleted_factId on factsDeleted (factId)""") create index if not exists ix_factsDeleted_factId on factsDeleted (factId)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_mediaDeleted_factId on mediaDeleted (mediaId)""") create index if not exists ix_mediaDeleted_factId on mediaDeleted (mediaId)""")
# tags # tags
txt = "create unique index if not exists ix_tags_tag on tags (tag)" txt = "create unique index if not exists ix_tags_tag on tags (tag)"
try: try:
deck.s.statement(txt) deck.db.statement(txt)
except: except:
deck.s.statement(""" deck.db.statement("""
delete from tags where exists (select 1 from tags t2 where tags.tag = t2.tag delete from tags where exists (select 1 from tags t2 where tags.tag = t2.tag
and tags.rowid > t2.rowid)""") and tags.rowid > t2.rowid)""")
deck.s.statement(txt) deck.db.statement(txt)
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_cardTags_tagCard on cardTags (tagId, cardId)""") create index if not exists ix_cardTags_tagCard on cardTags (tagId, cardId)""")
deck.s.statement(""" deck.db.statement("""
create index if not exists ix_cardTags_cardId on cardTags (cardId)""") create index if not exists ix_cardTags_cardId on cardTags (cardId)""")
def upgradeDeck(deck): def upgradeDeck(deck):
@ -92,13 +92,13 @@ def upgradeDeck(deck):
raise Exception("oldDeckVersion") raise Exception("oldDeckVersion")
if deck.version < 44: if deck.version < 44:
# leaner indices # leaner indices
deck.s.statement("drop index if exists ix_cards_factId") deck.db.statement("drop index if exists ix_cards_factId")
deck.version = 44 deck.version = 44
deck.s.commit() deck.db.commit()
if deck.version < 48: if deck.version < 48:
deck.updateFieldCache(deck.s.column0("select id from facts")) deck.updateFieldCache(deck.db.column0("select id from facts"))
deck.version = 48 deck.version = 48
deck.s.commit() deck.db.commit()
if deck.version < 52: if deck.version < 52:
dname = deck.name() dname = deck.name()
sname = deck.syncName sname = deck.syncName
@ -121,20 +121,20 @@ this message. (ERR-0101)""") % {
elif sname: elif sname:
deck.enableSyncing() deck.enableSyncing()
deck.version = 52 deck.version = 52
deck.s.commit() deck.db.commit()
if deck.version < 53: if deck.version < 53:
if deck.getBool("perDay"): if deck.getBool("perDay"):
if deck.hardIntervalMin == 0.333: if deck.hardIntervalMin == 0.333:
deck.hardIntervalMin = max(1.0, deck.hardIntervalMin) deck.hardIntervalMin = max(1.0, deck.hardIntervalMin)
deck.hardIntervalMax = max(1.1, deck.hardIntervalMax) deck.hardIntervalMax = max(1.1, deck.hardIntervalMax)
deck.version = 53 deck.version = 53
deck.s.commit() deck.db.commit()
if deck.version < 54: if deck.version < 54:
# broken versions of the DB orm die if this is a bool with a # broken versions of the DB orm die if this is a bool with a
# non-int value # non-int value
deck.s.statement("update fieldModels set editFontFamily = 1"); deck.db.statement("update fieldModels set editFontFamily = 1");
deck.version = 54 deck.version = 54
deck.s.commit() deck.db.commit()
if deck.version < 61: if deck.version < 61:
# do our best to upgrade templates to the new style # do our best to upgrade templates to the new style
txt = '''\ txt = '''\
@ -175,87 +175,87 @@ this message. (ERR-0101)""") % {
# rebuild the media db based on new format # rebuild the media db based on new format
rebuildMediaDir(deck, dirty=False) rebuildMediaDir(deck, dirty=False)
deck.version = 61 deck.version = 61
deck.s.commit() deck.db.commit()
if deck.version < 62: if deck.version < 62:
# updated indices # updated indices
deck.s.statement("drop index if exists ix_cards_typeCombined") deck.db.statement("drop index if exists ix_cards_typeCombined")
updateIndices(deck) updateIndices(deck)
deck.version = 62 deck.version = 62
deck.s.commit() deck.db.commit()
if deck.version < 64: if deck.version < 64:
# remove old static indices, as all clients should be libanki1.2+ # remove old static indices, as all clients should be libanki1.2+
for d in ("ix_cards_duePriority", for d in ("ix_cards_duePriority",
"ix_cards_priorityDue"): "ix_cards_priorityDue"):
deck.s.statement("drop index if exists %s" % d) deck.db.statement("drop index if exists %s" % d)
deck.version = 64 deck.version = 64
deck.s.commit() deck.db.commit()
# note: we keep the priority index for now # note: we keep the priority index for now
if deck.version < 65: if deck.version < 65:
# we weren't correctly setting relativeDelay when answering cards # we weren't correctly setting relativeDelay when answering cards
# in previous versions, so ensure everything is set correctly # in previous versions, so ensure everything is set correctly
deck.rebuildTypes() deck.rebuildTypes()
deck.version = 65 deck.version = 65
deck.s.commit() deck.db.commit()
# skip a few to allow for updates to stable tree # skip a few to allow for updates to stable tree
if deck.version < 70: if deck.version < 70:
# update dynamic indices given we don't use priority anymore # update dynamic indices given we don't use priority anymore
for d in ("intervalDesc", "intervalAsc", "randomOrder", for d in ("intervalDesc", "intervalAsc", "randomOrder",
"dueAsc", "dueDesc"): "dueAsc", "dueDesc"):
deck.s.statement("drop index if exists ix_cards_%s2" % d) deck.db.statement("drop index if exists ix_cards_%s2" % d)
deck.s.statement("drop index if exists ix_cards_%s" % d) deck.db.statement("drop index if exists ix_cards_%s" % d)
deck.updateDynamicIndices() deck.updateDynamicIndices()
# remove old views # remove old views
for v in ("failedCards", "revCardsOld", "revCardsNew", for v in ("failedCards", "revCardsOld", "revCardsNew",
"revCardsDue", "revCardsRandom", "acqCardsRandom", "revCardsDue", "revCardsRandom", "acqCardsRandom",
"acqCardsOld", "acqCardsNew"): "acqCardsOld", "acqCardsNew"):
deck.s.statement("drop view if exists %s" % v) deck.db.statement("drop view if exists %s" % v)
deck.version = 70 deck.version = 70
deck.s.commit() deck.db.commit()
if deck.version < 71: if deck.version < 71:
# remove the expensive value cache # remove the expensive value cache
deck.s.statement("drop index if exists ix_fields_value") deck.db.statement("drop index if exists ix_fields_value")
# add checksums and index # add checksums and index
deck.updateAllFieldChecksums() deck.updateAllFieldChecksums()
updateIndices(deck) updateIndices(deck)
deck.s.execute("vacuum") deck.db.execute("vacuum")
deck.s.execute("analyze") deck.db.execute("analyze")
deck.version = 71 deck.version = 71
deck.s.commit() deck.db.commit()
if deck.version < 72: if deck.version < 72:
# this was only used for calculating average factor # this was only used for calculating average factor
deck.s.statement("drop index if exists ix_cards_factor") deck.db.statement("drop index if exists ix_cards_factor")
deck.version = 72 deck.version = 72
deck.s.commit() deck.db.commit()
if deck.version < 73: if deck.version < 73:
# remove stats, as it's all in the revlog now # remove stats, as it's all in the revlog now
deck.s.statement("drop index if exists ix_stats_typeDay") deck.db.statement("drop index if exists ix_stats_typeDay")
deck.s.statement("drop table if exists stats") deck.db.statement("drop table if exists stats")
deck.version = 73 deck.version = 73
deck.s.commit() deck.db.commit()
if deck.version < 74: if deck.version < 74:
# migrate revlog data to new table # migrate revlog data to new table
deck.s.statement(""" deck.db.statement("""
insert into revlog select insert into revlog select
time, cardId, ease, reps, lastInterval, nextInterval, nextFactor, time, cardId, ease, reps, lastInterval, nextInterval, nextFactor,
min(thinkingTime, 60), 0 from reviewHistory""") min(thinkingTime, 60), 0 from reviewHistory""")
deck.s.statement("drop table reviewHistory") deck.db.statement("drop table reviewHistory")
# convert old ease0 into ease1 # convert old ease0 into ease1
deck.s.statement("update revlog set ease = 1 where ease = 0") deck.db.statement("update revlog set ease = 1 where ease = 0")
# remove priority index # remove priority index
deck.s.statement("drop index if exists ix_cards_priority") deck.db.statement("drop index if exists ix_cards_priority")
deck.version = 74 deck.version = 74
deck.s.commit() deck.db.commit()
# executing a pragma here is very slow on large decks, so we store # executing a pragma here is very slow on large decks, so we store
# our own record # our own record
if not deck.getInt("pageSize") == 4096: if not deck.getInt("pageSize") == 4096:
deck.s.commit() deck.db.commit()
deck.s.execute("pragma page_size = 4096") deck.db.execute("pragma page_size = 4096")
deck.s.execute("pragma legacy_file_format = 0") deck.db.execute("pragma legacy_file_format = 0")
deck.s.execute("vacuum") deck.db.execute("vacuum")
deck.setVar("pageSize", 4096, mod=False) deck.setVar("pageSize", 4096, mod=False)
deck.s.commit() deck.db.commit()
if prog: if prog:
assert deck.modified == oldmod assert deck.modified == oldmod
deck.finishProgress() deck.finishProgress()


@ -78,7 +78,7 @@ def test_saveAs():
newDeck = deck.saveAs(path) newDeck = deck.saveAs(path)
assert newDeck.cardCount == 1 assert newDeck.cardCount == 1
# delete card # delete card
id = newDeck.s.scalar("select id from cards") id = newDeck.db.scalar("select id from cards")
newDeck.deleteCard(id) newDeck.deleteCard(id)
# save into new deck # save into new deck
newDeck2 = newDeck.saveAs(path2) newDeck2 = newDeck.saveAs(path2)
@ -93,7 +93,7 @@ def test_factAddDelete():
deck = DeckStorage.Deck() deck = DeckStorage.Deck()
deck.addModel(BasicModel()) deck.addModel(BasicModel())
# set rollback point # set rollback point
deck.s.commit() deck.db.commit()
f = deck.newFact() f = deck.newFact()
# empty fields # empty fields
try: try:
@ -135,33 +135,33 @@ def test_fieldChecksum():
f = deck.newFact() f = deck.newFact()
f['Front'] = u"new"; f['Back'] = u"new2" f['Front'] = u"new"; f['Back'] = u"new2"
deck.addFact(f) deck.addFact(f)
(id, sum) = deck.s.first( (id, sum) = deck.db.first(
"select id, chksum from fields where value = 'new'") "select id, chksum from fields where value = 'new'")
assert sum == "22af645d" assert sum == "22af645d"
# empty field should have no checksum # empty field should have no checksum
f['Front'] = u"" f['Front'] = u""
deck.s.flush() deck.db.flush()
assert deck.s.scalar( assert deck.db.scalar(
"select chksum from fields where id = :id", id=id) == "" "select chksum from fields where id = :id", id=id) == ""
# changing the value should change the checksum # changing the value should change the checksum
f['Front'] = u"newx" f['Front'] = u"newx"
deck.s.flush() deck.db.flush()
assert deck.s.scalar( assert deck.db.scalar(
"select chksum from fields where id = :id", id=id) == "4b0e5a4c" "select chksum from fields where id = :id", id=id) == "4b0e5a4c"
# back should have no checksum, because it's not set to be unique # back should have no checksum, because it's not set to be unique
(id, sum) = deck.s.first( (id, sum) = deck.db.first(
"select id, chksum from fields where value = 'new2'") "select id, chksum from fields where value = 'new2'")
assert sum == "" assert sum == ""
# if we turn on unique, it should get a checksum # if we turn on unique, it should get a checksum
fm = f.model.fieldModels[1] fm = f.model.fieldModels[1]
fm.unique = True fm.unique = True
deck.updateFieldChecksums(fm.id) deck.updateFieldChecksums(fm.id)
assert deck.s.scalar( assert deck.db.scalar(
"select chksum from fields where id = :id", id=id) == "82f2ec5f" "select chksum from fields where id = :id", id=id) == "82f2ec5f"
# and turning it off should zero the checksum again # and turning it off should zero the checksum again
fm.unique = False fm.unique = False
deck.updateFieldChecksums(fm.id) deck.updateFieldChecksums(fm.id)
assert deck.s.scalar( assert deck.db.scalar(
"select chksum from fields where id = :id", id=id) == "" "select chksum from fields where id = :id", id=id) == ""
def test_modelAddDelete(): def test_modelAddDelete():
@ -176,7 +176,7 @@ def test_modelAddDelete():
deck.deleteModel(deck.currentModel) deck.deleteModel(deck.currentModel)
deck.reset() deck.reset()
assert deck.cardCount == 0 assert deck.cardCount == 0
deck.s.refresh(deck) deck.db.refresh(deck)
def test_modelCopy(): def test_modelCopy():
deck = DeckStorage.Deck() deck = DeckStorage.Deck()
@ -263,7 +263,7 @@ def test_modelChange():
assert deck.modelUseCount(m2) == 1 assert deck.modelUseCount(m2) == 1
assert deck.cardCount == 3 assert deck.cardCount == 3
assert deck.factCount == 2 assert deck.factCount == 2
(q, a) = deck.s.first(""" (q, a) = deck.db.first("""
select question, answer from cards where factId = :id""", select question, answer from cards where factId = :id""",
id=f.id) id=f.id)
assert stripHTML(q) == u"e" assert stripHTML(q) == u"e"


@ -23,7 +23,7 @@ def test_csv():
# four problems - missing front, dupe front, wrong num of fields # four problems - missing front, dupe front, wrong num of fields
assert len(i.log) == 4 assert len(i.log) == 4
assert i.total == 5 assert i.total == 5
deck.s.close() deck.close()
def test_csv_tags(): def test_csv_tags():
deck = DeckStorage.Deck() deck = DeckStorage.Deck()
@ -31,10 +31,10 @@ def test_csv_tags():
file = unicode(os.path.join(testDir, "importing/text-tags.txt")) file = unicode(os.path.join(testDir, "importing/text-tags.txt"))
i = csvfile.TextImporter(deck, file) i = csvfile.TextImporter(deck, file)
i.doImport() i.doImport()
facts = deck.s.query(Fact).all() facts = deck.db.query(Fact).all()
assert len(facts) == 2 assert len(facts) == 2
assert facts[0].tags == "baz qux" or facts[1].tags == "baz qux" assert facts[0].tags == "baz qux" or facts[1].tags == "baz qux"
deck.s.close() deck.close()
def test_mnemosyne10(): def test_mnemosyne10():
deck = DeckStorage.Deck() deck = DeckStorage.Deck()
@ -43,7 +43,7 @@ def test_mnemosyne10():
i = mnemosyne10.Mnemosyne10Importer(deck, file) i = mnemosyne10.Mnemosyne10Importer(deck, file)
i.doImport() i.doImport()
assert i.total == 5 assert i.total == 5
deck.s.close() deck.close()
def test_supermemo_xml_01_unicode(): def test_supermemo_xml_01_unicode():
deck = DeckStorage.Deck() deck = DeckStorage.Deck()
@ -54,7 +54,7 @@ def test_supermemo_xml_01_unicode():
i.doImport() i.doImport()
# only returning top-level elements? # only returning top-level elements?
assert i.total == 1 assert i.total == 1
deck.s.close() deck.close()
def test_anki10(): def test_anki10():
# though these are not modified, sqlite updates the mtime, so copy to tmp # though these are not modified, sqlite updates the mtime, so copy to tmp
@ -69,7 +69,7 @@ def test_anki10():
i = anki10.Anki10Importer(deck, file) i = anki10.Anki10Importer(deck, file)
i.doImport() i.doImport()
assert i.total == 2 assert i.total == 2
deck.s.rollback() deck.db.rollback()
deck.close() deck.close()
# import a deck into itself - 10-2 is the same as test10, but with one # import a deck into itself - 10-2 is the same as test10, but with one
# card answered and another deleted. nothing should be synced to client # card answered and another deleted. nothing should be synced to client
@ -77,7 +77,7 @@ def test_anki10():
i = anki10.Anki10Importer(deck, file2) i = anki10.Anki10Importer(deck, file2)
i.doImport() i.doImport()
assert i.total == 0 assert i.total == 0
deck.s.rollback() deck.db.rollback()
def test_anki10_modtime(): def test_anki10_modtime():
deck1 = DeckStorage.Deck() deck1 = DeckStorage.Deck()
@ -101,9 +101,9 @@ def test_anki10_modtime():
i.doImport() i.doImport()
client.sync() client.sync()
assert i.total == 1 assert i.total == 1
assert deck2.s.scalar("select count(*) from cards") == 2 assert deck2.db.scalar("select count(*) from cards") == 2
assert deck2.s.scalar("select count(*) from facts") == 2 assert deck2.db.scalar("select count(*) from facts") == 2
assert deck2.s.scalar("select count(*) from models") == 2 assert deck2.db.scalar("select count(*) from models") == 2
def test_dingsbums(): def test_dingsbums():
deck = DeckStorage.Deck() deck = DeckStorage.Deck()
@ -113,7 +113,7 @@ def test_dingsbums():
i = dingsbums.DingsBumsImporter(deck, file) i = dingsbums.DingsBumsImporter(deck, file)
i.doImport() i.doImport()
assert 7 == i.total assert 7 == i.total
deck.s.close() deck.close()
def test_updating(): def test_updating():
# get the standard csv deck first # get the standard csv deck first
@ -129,11 +129,11 @@ def test_updating():
i.updateKey = (0, deck.currentModel.fieldModels[0].id) i.updateKey = (0, deck.currentModel.fieldModels[0].id)
i.multipleCardsAllowed = False i.multipleCardsAllowed = False
i.doImport() i.doImport()
ans = deck.s.scalar( ans = deck.db.scalar(
u"select answer from cards where question like '%食べる%'") u"select answer from cards where question like '%食べる%'")
assert "to ate" in ans assert "to ate" in ans
# try again with tags # try again with tags
i.updateKey = (0, deck.currentModel.fieldModels[0].id) i.updateKey = (0, deck.currentModel.fieldModels[0].id)
i.mapping[1] = 0 i.mapping[1] = 0
i.doImport() i.doImport()
deck.s.close() deck.close()


@ -33,7 +33,7 @@ def test_copy():
# new file # new file
assert m.copyToMedia(deck, path) == "foo.jpg" assert m.copyToMedia(deck, path) == "foo.jpg"
# dupe md5 # dupe md5
deck.s.statement(""" deck.db.statement("""
insert into media values (null, 'foo.jpg', 0, 0, :sum, '')""", insert into media values (null, 'foo.jpg', 0, 0, :sum, '')""",
sum=checksum("hello")) sum=checksum("hello"))
path = os.path.join(dir, "bar.jpg") path = os.path.join(dir, "bar.jpg")
@ -53,54 +53,54 @@ def test_db():
f['Back'] = u"back [sound:foo.jpg]" f['Back'] = u"back [sound:foo.jpg]"
deck.addFact(f) deck.addFact(f)
# 1 entry in the media db, with two references, and missing file # 1 entry in the media db, with two references, and missing file
assert deck.s.scalar("select count() from media") == 1 assert deck.db.scalar("select count() from media") == 1
assert deck.s.scalar("select size from media") == 2 assert deck.db.scalar("select size from media") == 2
assert deck.s.scalar("select not originalPath from media") assert deck.db.scalar("select not originalPath from media")
# copy to media folder & check db # copy to media folder & check db
path = m.copyToMedia(deck, path) path = m.copyToMedia(deck, path)
m.rebuildMediaDir(deck) m.rebuildMediaDir(deck)
# md5 should be set now # md5 should be set now
assert deck.s.scalar("select count() from media") == 1 assert deck.db.scalar("select count() from media") == 1
assert deck.s.scalar("select size from media") == 2 assert deck.db.scalar("select size from media") == 2
assert deck.s.scalar("select originalPath from media") assert deck.db.scalar("select originalPath from media")
# edit the fact to remove a reference # edit the fact to remove a reference
f['Back'] = u"" f['Back'] = u""
f.setModified(True, deck) f.setModified(True, deck)
deck.s.flush() deck.db.flush()
assert deck.s.scalar("select count() from media") == 1 assert deck.db.scalar("select count() from media") == 1
assert deck.s.scalar("select size from media") == 1 assert deck.db.scalar("select size from media") == 1
# remove the front reference too # remove the front reference too
f['Front'] = u"" f['Front'] = u""
f.setModified(True, deck) f.setModified(True, deck)
assert deck.s.scalar("select size from media") == 0 assert deck.db.scalar("select size from media") == 0
# add the reference back # add the reference back
f['Front'] = u"<img src='foo.jpg'>" f['Front'] = u"<img src='foo.jpg'>"
f.setModified(True, deck) f.setModified(True, deck)
assert deck.s.scalar("select size from media") == 1 assert deck.db.scalar("select size from media") == 1
# detect file modifications # detect file modifications
oldsum = deck.s.scalar("select originalPath from media") oldsum = deck.db.scalar("select originalPath from media")
open(path, "w").write("world") open(path, "w").write("world")
m.rebuildMediaDir(deck) m.rebuildMediaDir(deck)
newsum = deck.s.scalar("select originalPath from media") newsum = deck.db.scalar("select originalPath from media")
assert newsum and newsum != oldsum assert newsum and newsum != oldsum
# delete underlying file and check db # delete underlying file and check db
os.unlink(path) os.unlink(path)
m.rebuildMediaDir(deck) m.rebuildMediaDir(deck)
# md5 should be gone again # md5 should be gone again
assert deck.s.scalar("select count() from media") == 1 assert deck.db.scalar("select count() from media") == 1
assert deck.s.scalar("select not originalPath from media") assert deck.db.scalar("select not originalPath from media")
# media db should pick up media defined via templates & bulk update # media db should pick up media defined via templates & bulk update
f['Back'] = u"bar.jpg" f['Back'] = u"bar.jpg"
f.setModified(True, deck) f.setModified(True, deck)
deck.s.flush() deck.db.flush()
# modify template & regenerate # modify template & regenerate
assert deck.s.scalar("select count() from media") == 1 assert deck.db.scalar("select count() from media") == 1
assert deck.s.scalar("select sum(size) from media") == 1 assert deck.db.scalar("select sum(size) from media") == 1
deck.currentModel.cardModels[0].aformat=u'<img src="{{{Back}}}">' deck.currentModel.cardModels[0].aformat=u'<img src="{{{Back}}}">'
deck.updateCardsFromModel(deck.currentModel) deck.updateCardsFromModel(deck.currentModel)
assert deck.s.scalar("select sum(size) from media") == 2 assert deck.db.scalar("select sum(size) from media") == 2
assert deck.s.scalar("select count() from media") == 2 assert deck.db.scalar("select count() from media") == 2
deck.currentModel.cardModels[0].aformat=u'{{{Back}}}' deck.currentModel.cardModels[0].aformat=u'{{{Back}}}'
deck.updateCardsFromModel(deck.currentModel) deck.updateCardsFromModel(deck.currentModel)
assert deck.s.scalar("select count() from media") == 2 assert deck.db.scalar("select count() from media") == 2
assert deck.s.scalar("select sum(size) from media") == 1 assert deck.db.scalar("select sum(size) from media") == 1


@ -103,7 +103,7 @@ def test_localsync_deck():
c = deck1.getCard() c = deck1.getCard()
deck1.answerCard(c, 4) deck1.answerCard(c, 4)
client.sync() client.sync()
assert deck2.s.scalar("select count(*) from revlog") == 1 assert deck2.db.scalar("select count(*) from revlog") == 1
# make sure meta data is synced # make sure meta data is synced
deck1.setVar("foo", 1) deck1.setVar("foo", 1)
assert deck1.getInt("foo") == 1 assert deck1.getInt("foo") == 1
@ -164,15 +164,15 @@ def test_localsync_factsandcards():
assert deck1.factCount == 2 and deck1.cardCount == 4 assert deck1.factCount == 2 and deck1.cardCount == 4
assert deck2.factCount == 2 and deck2.cardCount == 4 assert deck2.factCount == 2 and deck2.cardCount == 4
# ensure the fact was copied across # ensure the fact was copied across
f1 = deck1.s.query(Fact).first() f1 = deck1.db.query(Fact).first()
f2 = deck1.s.query(Fact).get(f1.id) f2 = deck1.db.query(Fact).get(f1.id)
f1['Front'] = u"myfront" f1['Front'] = u"myfront"
f1.setModified() f1.setModified()
deck1.setModified() deck1.setModified()
client.sync() client.sync()
deck1.rebuildCounts() deck1.rebuildCounts()
deck2.rebuildCounts() deck2.rebuildCounts()
f2 = deck1.s.query(Fact).get(f1.id) f2 = deck1.db.query(Fact).get(f1.id)
assert f2['Front'] == u"myfront" assert f2['Front'] == u"myfront"
c1 = deck1.getCard() c1 = deck1.getCard()
c2 = deck2.getCard() c2 = deck2.getCard()
@ -226,8 +226,8 @@ def test_localsync_media():
assert len(os.listdir(deck2media)) == 1 assert len(os.listdir(deck2media)) == 1
client.sync() client.sync()
# metadata should have been copied # metadata should have been copied
assert deck1.s.scalar("select count(1) from media") == 3 assert deck1.db.scalar("select count(1) from media") == 3
assert deck2.s.scalar("select count(1) from media") == 3 assert deck2.db.scalar("select count(1) from media") == 3
# copy local files # copy local files
copyLocalMedia(deck1, deck2) copyLocalMedia(deck1, deck2)
assert len(os.listdir(deck1media)) == 2 assert len(os.listdir(deck1media)) == 2
@ -239,8 +239,8 @@ def test_localsync_media():
os.unlink(os.path.join(deck1media, "22161b29b0c18e068038021f54eee1ee.png")) os.unlink(os.path.join(deck1media, "22161b29b0c18e068038021f54eee1ee.png"))
rebuildMediaDir(deck1) rebuildMediaDir(deck1)
client.sync() client.sync()
assert deck1.s.scalar("select count(1) from media") == 3 assert deck1.db.scalar("select count(1) from media") == 3
assert deck2.s.scalar("select count(1) from media") == 3 assert deck2.db.scalar("select count(1) from media") == 3
# Remote tests # Remote tests
########################################################################## ##########################################################################