Mirror of https://github.com/ankitects/anki.git, synced 2025-09-24 16:56:36 -04:00
speed up the revlog migration, tweak db handling
remove the unnecessary transaction magic; we just needed to make sure to lock again after undo is initialized
parent e49211c5b6
commit ba09529e34
3 changed files with 22 additions and 15 deletions
@@ -15,7 +15,7 @@ from anki.hooks import runHook
 class DB(object):
     def __init__(self, path, text=None):
-        self._db = sqlite.connect(path, timeout=0, isolation_level=None)
+        self._db = sqlite.connect(path, timeout=0)
         if text:
             self._db.text_factory = text
         self._path = path
@@ -23,7 +23,7 @@ class DB(object):
     def execute(self, sql, *a, **ka):
         if self.echo:
-            print sql, a, ka
+            print sql #, a, ka
         if ka:
             # execute("...where id = :id", id=5)
             res = self._db.execute(sql, ka)
@@ -34,7 +34,7 @@ class DB(object):
     def executemany(self, sql, l):
         if self.echo:
-            print sql, l
+            print sql #, l
         self._db.executemany(sql, l)

     def commit(self):
@@ -119,10 +119,6 @@ qconf=?, conf=?, data=?""",
         self.lock()

     def lock(self):
-        # we don't know if pysqlite has taken out a transaction from under us,
-        # so make sure we're committed
-        self.db.commit()
-        self.db.execute("begin exclusive")
         self.db.execute("update deck set mod=mod")

     def close(self, save=True):
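A note on the hunks above: with isolation_level=None dropped from the connect call (first hunk), pysqlite is back to managing transactions itself, so the dummy UPDATE alone appears to be enough to open a write transaction and take the database lock; the explicit commit() and "begin exclusive" look like the "transaction magic" the commit message says is unnecessary. A standalone sketch of that behaviour, not Anki code, with made-up file and table names:

    import sqlite3

    db = sqlite3.connect("lock-demo.db", timeout=0)
    db.execute("create table if not exists deck (id integer primary key, mod integer)")
    db.execute("insert or ignore into deck values (1, 0)")
    db.commit()

    # under the default isolation level the driver begins a transaction here,
    # and the no-op update holds the write lock until the next commit/rollback
    db.execute("update deck set mod=mod")

    other = sqlite3.connect("lock-demo.db", timeout=0)
    try:
        other.execute("update deck set mod=mod")
    except sqlite3.OperationalError as e:
        print(e)  # "database is locked" while the first connection holds the lock

    db.commit()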
@@ -737,6 +733,7 @@ insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table}
             sql += ",' || quote(old.%s) ||'" % c
         sql += ")'); end"
         self.db.execute(sql)
+        self.lock()

     def undoName(self):
         for n in reversed(self.undoStack):
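The self.lock() added above is the second half of the commit message: creating the temporary undo triggers is DDL, and the pysqlite of this era would implicitly commit any pending transaction before running such statements, so a lock taken earlier was silently released. Re-locking once undo is initialized restores it, which is presumably why the commit/begin juggling inside lock() itself could go.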
@@ -848,7 +845,6 @@ seq > :s and seq <= :e order by seq desc""", s=start, e=end)
         return "\n".join(problems)

     def optimize(self):
-        self.db.commit()
         self.db.execute("vacuum")
         self.db.execute("analyze")
         self.lock()
@@ -242,8 +242,7 @@ def _upgradeSchema(db):
     # move into temp table
     _moveTable(db, "cards", True)
     # use the new order to rewrite card ids
-    map = dict(db.all("select id, rowid from cards2"))
-    _insertWithIdChange(db, map, 0, "reviewHistory", 12)
+    cardmap = dict(db.all("select id, rowid from cards2"))
     # move back, preserving new ids
     db.execute("""
 insert into cards select rowid, factId, cardModelId, 1, cast(created as int),
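Note that cardmap replaces the _insertWithIdChange() pass over reviewHistory: instead of pushing that table through the generic id-rewriting helper, the old-id to new-rowid mapping is built once as a dict and applied in Python while the revlog rows are copied, as the next hunk shows.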
@@ -321,15 +320,27 @@ name, "{}", "{}", ?, "" from models2""", simplejson.dumps(

     # reviewHistory -> revlog
     ###########
-    db.execute("""
-insert or ignore into revlog select
+    # fetch the data so we can rewrite ids quickly
+    r = []
+    for row in db.execute("""
+select
 cast(time*1000 as int), cardId, ease, reps,
 cast(lastInterval as int), cast(nextInterval as int),
 cast(nextFactor*1000 as int), cast(min(thinkingTime, 60)*1000 as int),
-0 from reviewHistory""")
+0 from reviewHistory"""):
+        row = list(row)
+        # new card ids
+        try:
+            row[1] = cardmap[row[1]]
+        except:
+            # id doesn't exist
+            continue
+        # no ease 0 anymore
+        row[2] = row[2] or 1
+        r.append(row)
+    db.executemany(
+        "insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)", r)
     db.execute("drop table reviewHistory")
-    # convert old ease0 into ease1
-    db.execute("update revlog set ease = 1 where ease = 0")

     # longer migrations
     ###########
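The speed-up in this hunk comes from doing the id rewrite in Python and batching the writes: the old reviewHistory rows are read once, card ids are translated through the cardmap dict (rows whose card no longer exists are skipped), ease 0 is normalised to 1 inline, and everything is inserted with a single executemany() instead of being fixed up afterwards with an UPDATE. A self-contained sketch of the same pattern, using the standard sqlite3 module and made-up schemas and data rather than Anki's:

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.executescript("""
    create table reviewHistory (cardId integer, ease integer, time real);
    create table cards2 (id integer);
    create table revlog (id integer, cid integer, ease integer);
    """)
    db.executemany("insert into reviewHistory values (?,?,?)",
                   [(10, 0, 1.5), (11, 2, 2.5), (99, 3, 3.5)])
    db.executemany("insert into cards2 (rowid, id) values (?,?)",
                   [(1, 10), (2, 11)])  # card 99 no longer exists

    # build the old-id -> new-rowid map once, then rewrite rows in Python
    cardmap = dict(db.execute("select id, rowid from cards2"))
    rows = []
    for tstamp, cid, ease in db.execute(
            "select cast(time*1000 as int), cardId, ease from reviewHistory"):
        try:
            cid = cardmap[cid]            # new card ids
        except KeyError:
            continue                      # id doesn't exist any more
        rows.append((tstamp, cid, ease or 1))   # no ease 0 any more

    # one batched insert instead of a per-row insert plus a fix-up update
    db.executemany("insert into revlog values (?,?,?)", rows)
    print(db.execute("select * from revlog").fetchall())
    # -> [(1500, 1, 1), (2500, 2, 2)]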