fix upgrading; drop old mnemosyne 1 importer

Damien Elmes 2011-10-20 22:05:34 +09:00
parent cf4abcb403
commit 76960abd75
6 changed files with 366 additions and 457 deletions


@@ -44,6 +44,9 @@ SYNC_PORT = int(os.environ.get("SYNC_PORT") or 80)
 SYNC_URL = "http://%s:%d/sync/" % (SYNC_HOST, SYNC_PORT)
 SYNC_VER = 0
 
+# deck schema
+SCHEMA_VERSION = 1
+
 # Labels
 ##########################################################################


@@ -1,70 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright: Damien Elmes <anki@ichi2.net>
-# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-
-import sys, pickle, time, re
-from anki.importing import Importer, ForeignCard
-from anki.errors import *
-
-class Mnemosyne10Importer(Importer):
-
-    multipleCardsAllowed = False
-
-    def foreignCards(self):
-        # empty objects so we can load the native mnemosyne file
-        class MnemosyneModule(object):
-            class StartTime:
-                pass
-            class Category:
-                pass
-            class Item:
-                pass
-        for module in ('mnemosyne',
-                       'mnemosyne.core',
-                       'mnemosyne.core.mnemosyne_core'):
-            sys.modules[module] = MnemosyneModule()
-        try:
-            file = open(self.file, "rb")
-        except (IOError, OSError), e:
-            raise ImportFormatError(type="systemError",
-                                    info=str(e))
-        header = file.readline().strip()
-        # read the structure in
-        try:
-            struct = pickle.load(file)
-        except (EOFError, KeyError):
-            raise ImportFormatError(type="invalidFile")
-        startTime = struct[0].time
-        daysPassed = (time.time() - startTime) / 86400.0
-        # gather cards
-        cards = []
-        for item in struct[2]:
-            card = ForeignCard()
-            card.fields.append(self.fudgeText(item.q))
-            card.fields.append(self.fudgeText(item.a))
-            # scheduling data
-            card.interval = item.next_rep - item.last_rep
-            secDelta = (item.next_rep - daysPassed) * 86400.0
-            card.due = card.nextTime = time.time() + secDelta
-            card.factor = item.easiness
-            # for some reason mnemosyne starts cards off on 1 instead of 0
-            card.successive = max(
-                (item.acq_reps_since_lapse + item.ret_reps_since_lapse -1), 0)
-            card.yesCount = max((item.acq_reps + item.ret_reps) - 1, 0)
-            card.noCount = item.lapses
-            card.reps = card.yesCount + card.noCount
-            if item.cat.name != u"<default>":
-                card.tags = item.cat.name.replace(" ", "_")
-            cards.append(card)
-        return cards
-
-    def fields(self):
-        return 2
-
-    def fudgeText(self, text):
-        text = text.replace("\n", "<br>")
-        text = re.sub('<sound src="(.*?)">', '[sound:\\1]', text)
-        text = re.sub('<(/?latex)>', '[\\1]', text)
-        text = re.sub('<(/?\$)>', '[\\1]', text)
-        text = re.sub('<(/?\$\$)>', '[\\1]', text)
-        return text


@@ -2,139 +2,112 @@
 # Copyright: Damien Elmes <anki@ichi2.net>
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/copyleft/agpl.html
 
-import os, time, simplejson, re, datetime
+import os, time, simplejson, re, datetime, shutil
 from anki.lang import _
-from anki.utils import intTime
+from anki.utils import intTime, namedtmp
 from anki.db import DB
 from anki.deck import _Deck
 from anki.consts import *
 from anki.storage import _addSchema, _getDeckVars, _addDeckVars, \
     _updateIndices
 
-def Deck(path, queue=True, lock=True, server=False):
-    "Open a new or existing deck. Path must be unicode."
-    path = os.path.abspath(path)
-    create = not os.path.exists(path)
-    if create:
-        base = os.path.basename(path)
-        for c in ("/", ":", "\\"):
-            assert c not in base
-    # connect
-    db = DB(path)
-    if create:
-        ver = _createDB(db)
-    else:
-        ver = _upgradeSchema(db)
-    db.execute("pragma temp_store = memory")
-    db.execute("pragma cache_size = 10000")
-    # add db to deck and do any remaining upgrades
-    deck = _Deck(db, server)
-    if ver < CURRENT_VERSION:
-        _upgradeDeck(deck, ver)
-    elif create:
-        # add in reverse order so basic is default
-        addClozeModel(deck)
-        addBasicModel(deck)
-        deck.save()
-    if lock:
-        deck.lock()
-    if not queue:
-        return deck
-    # rebuild queue
-    deck.reset()
-    return deck
-
-# 2.0 schema migration
-######################################################################
-
-def _moveTable(db, table, cards=False):
-    if cards:
-        insExtra = " order by created"
-    else:
-        insExtra = ""
-    sql = db.scalar(
-        "select sql from sqlite_master where name = '%s'" % table)
-    sql = sql.replace("TABLE "+table, "temporary table %s2" % table)
-    if cards:
-        sql = sql.replace("PRIMARY KEY (id),", "")
-    db.execute(sql)
-    db.execute("insert into %s2 select * from %s%s" % (table, table, insExtra))
-    db.execute("drop table "+table)
-    _addSchema(db, False)
-
-def _upgradeSchema(db):
-    "Alter tables prior to ORM initialization."
-    try:
-        ver = db.scalar("select ver from deck")
-    except:
-        ver = db.scalar("select version from decks")
-    # latest 1.2 is 65
-    if ver < 65:
-        raise AnkiError("oldDeckVersion")
-    if ver > 99:
-        # anki 2.0
-        if ver > CURRENT_VERSION:
-            # refuse to load decks created with a future version
-            raise AnkiError("newDeckVersion")
-        return ver
-    runHook("1.x upgrade", db)
-    # these weren't always correctly set
-    db.execute("pragma page_size = 4096")
-    db.execute("pragma legacy_file_format = 0")
-    # facts
-    ###########
-    # tags should have a leading and trailing space if not empty, and not
-    # use commas
-    db.execute("""
+#
+# Upgrading is the first step in migrating to 2.0. The ids are temporary and
+# may not be unique across multiple decks. After each of a user's v1.2 decks
+# are upgraded, they need to be merged.
+#
+# Caller should have called check() on path before using this.
+#
+
+class Upgrader(object):
+
+    def __init__(self):
+        pass
+
+    def upgrade(self, path):
+        self.path = path
+        self._openDB(path)
+        self._upgradeSchema()
+        self._openDeck()
+        self._upgradeDeck()
+        return self.deck
+
+    def _openDB(self, path):
+        self.tmppath = namedtmp(os.path.basename(path))
+        shutil.copy(path, self.tmppath)
+        self.db = DB(self.tmppath)
+
+    def _openDeck(self):
+        self.deck = _Deck(self.db)
+
+    # Schema upgrade
+    ######################################################################
+
+    def _upgradeSchema(self):
+        "Alter tables prior to ORM initialization."
+        db = self.db
+        # speed up the upgrade
+        db.execute("pragma temp_store = memory")
+        db.execute("pragma cache_size = 10000")
+        # these weren't always correctly set
+        db.execute("pragma page_size = 4096")
+        db.execute("pragma legacy_file_format = 0")
+        # facts
+        ###########
+        # tags should have a leading and trailing space if not empty, and not
+        # use commas
+        db.execute("""
 update facts set tags = (case
 when trim(tags) == "" then ""
 else " " || replace(replace(trim(tags), ",", " "), "  ", " ") || " "
 end)
 """)
         # pull facts into memory, so we can merge them with fields efficiently
         facts = db.all("""
-select id, id, modelId, 1, cast(created*1000 as int), cast(modified as int), 0, tags
-from facts order by created""")
+select id, id, modelId, 1, cast(created*1000 as int), cast(modified as int),
+0, tags from facts order by created""")
         # build field hash
         fields = {}
         for (fid, ord, val) in db.execute(
             "select factId, ordinal, value from fields order by factId, ordinal"):
             if fid not in fields:
                 fields[fid] = []
             fields[fid].append((ord, val))
         # build insert data and transform ids, and minimize qt's
         # bold/italics/underline cruft.
         map = {}
         data = []
         factidmap = {}
         times = {}
         from anki.utils import minimizeHTML
         for c, row in enumerate(facts):
             oldid = row[0]
             row = list(row)
             # get rid of old created column and update id
             while row[4] in times:
                 row[4] += 1
             times[row[4]] = True
             factidmap[row[0]] = row[4]
             row[0] = row[4]
             del row[4]
             map[oldid] = row[0]
             row.append(minimizeHTML("\x1f".join([x[1] for x in sorted(fields[oldid])])))
             data.append(row)
         # and put the facts into the new table
         db.execute("drop table facts")
         _addSchema(db, False)
         db.executemany("insert into facts values (?,?,?,?,?,?,?,?,'',0,'')", data)
         db.execute("drop table fields")
         # cards
         ###########
         # we need to pull this into memory, to rewrite the creation time if
         # it's not unique and update the fact id
         times = {}
         rows = []
         cardidmap = {}
         for row in db.execute("""
 select id, cast(created*1000 as int), factId, ordinal,
 cast(modified as int), 0,
 (case relativeDelay
@@ -149,286 +122,283 @@ else type end),
 cast(due as int), cast(interval as int),
 cast(factor*1000 as int), reps, noCount from cards
 order by created"""):
             # find an unused time
             row = list(row)
             while row[1] in times:
                 row[1] += 1
             times[row[1]] = True
             # rewrite fact id
             row[2] = factidmap[row[2]]
             # note id change and save all but old id
             cardidmap[row[0]] = row[1]
             rows.append(row[1:])
         # drop old table and rewrite
         db.execute("drop table cards")
         _addSchema(db, False)
         db.executemany("""
 insert into cards values (?,?,1,?,?,?,?,?,?,?,?,?,?,0,0,0,"")""",
                        rows)
         # reviewHistory -> revlog
         ###########
         # fetch the data so we can rewrite ids quickly
         r = []
         for row in db.execute("""
 select
 cast(time*1000 as int), cardId, 0, ease,
 cast(nextInterval as int), cast(lastInterval as int),
 cast(nextFactor*1000 as int), cast(min(thinkingTime, 60)*1000 as int),
 yesCount from reviewHistory"""):
             row = list(row)
             # new card ids
             try:
                 row[1] = cardidmap[row[1]]
             except:
                 # id doesn't exist
                 continue
             # no ease 0 anymore
             row[2] = row[2] or 1
             # determine type, overwriting yesCount
             newInt = row[3]
             oldInt = row[4]
             yesCnt = row[7]
             # yesCnt included the current answer
             if row[2] > 1:
                 yesCnt -= 1
             if oldInt < 1:
                 # new or failed
                 if yesCnt:
                     # type=relrn
                     row[7] = 2
                 else:
                     # type=lrn
                     row[7] = 0
             else:
                 # type=rev
                 row[7] = 1
             r.append(row)
         db.executemany(
             "insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)", r)
         db.execute("drop table reviewHistory")
         # deck
         ###########
-    _migrateDeckTbl(db)
+        self._migrateDeckTbl()
         # tags
         ###########
         tags = {}
         for t in db.list("select tag from tags"):
             tags[t] = intTime()
         db.execute("update deck set tags = ?", simplejson.dumps(tags))
         db.execute("drop table tags")
         db.execute("drop table cardTags")
         # the rest
         ###########
         db.execute("drop table media")
         db.execute("drop table sources")
-    _migrateModels(db)
+        self._migrateModels()
         _updateIndices(db)
-    return ver
 
-def _migrateDeckTbl(db):
+    def _migrateDeckTbl(self):
         import anki.deck
+        db = self.db
         db.execute("delete from deck")
         db.execute("""
 insert or replace into deck select id, cast(created as int), :t,
 :t, 99, 0, 0, cast(lastSync as int),
 "", "", "", "", "" from decks""", t=intTime())
         # prepare a group to store the old deck options
         g, gc, conf = _getDeckVars(db)
         # delete old selective study settings, which we can't auto-upgrade easily
         keys = ("newActive", "newInactive", "revActive", "revInactive")
         for k in keys:
             db.execute("delete from deckVars where key=:k", k=k)
         # copy other settings, ignoring deck order as there's a new default
         g['newSpread'] = db.scalar("select newCardSpacing from decks")
         g['newPerDay'] = db.scalar("select newCardsPerDay from decks")
         g['repLim'] = db.scalar("select sessionRepLimit from decks")
         g['timeLim'] = db.scalar("select sessionTimeLimit from decks")
         # this needs to be placed in the model later on
         conf['oldNewOrder'] = db.scalar("select newCardOrder from decks")
         # no reverse option anymore
         conf['oldNewOrder'] = min(1, conf['oldNewOrder'])
         # add any deck vars and save
         dkeys = ("hexCache", "cssCache")
         for (k, v) in db.execute("select * from deckVars").fetchall():
             if k in dkeys:
                 pass
             else:
                 conf[k] = v
         _addDeckVars(db, g, gc, conf)
         # clean up
         db.execute("drop table decks")
         db.execute("drop table deckVars")
 
-def _migrateModels(db):
+    def _migrateModels(self):
         import anki.models
+        db = self.db
         times = {}
         mods = {}
         for row in db.all(
             "select id, name from models"):
             while 1:
                 t = intTime(1000)
                 if t not in times:
                     times[t] = True
                     break
             m = anki.models.defaultModel.copy()
             m['id'] = t
             m['name'] = row[1]
             m['mod'] = intTime()
             m['tags'] = []
-            m['flds'] = _fieldsForModel(db, row[0])
-            m['tmpls'] = _templatesForModel(db, row[0], m['flds'])
+            m['flds'] = self._fieldsForModel(row[0])
+            m['tmpls'] = self._templatesForModel(row[0], m['flds'])
             mods[m['id']] = m
             db.execute("update facts set mid = ? where mid = ?", t, row[0])
         # save and clean up
         db.execute("update deck set models = ?", simplejson.dumps(mods))
         db.execute("drop table fieldModels")
         db.execute("drop table cardModels")
         db.execute("drop table models")
 
-def _fieldsForModel(db, mid):
+    def _fieldsForModel(self, mid):
         import anki.models
+        db = self.db
         dconf = anki.models.defaultField
         flds = []
         for c, row in enumerate(db.all("""
 select name, features, required, "unique",
 quizFontFamily, quizFontSize, quizFontColour, editFontSize from fieldModels
 where modelId = ?
 order by ordinal""", mid)):
             conf = dconf.copy()
             (conf['name'],
              conf['rtl'],
              conf['req'],
              conf['uniq'],
              conf['font'],
              conf['qsize'],
              conf['qcol'],
              conf['esize']) = row
             conf['ord'] = c
             # ensure data is good
             conf['rtl'] = not not conf['rtl']
             conf['pre'] = True
             conf['font'] = conf['font'] or "Arial"
             conf['qcol'] = conf['qcol'] or "#000"
             conf['qsize'] = conf['qsize'] or 20
             conf['esize'] = conf['esize'] or 20
             flds.append(conf)
         return flds
 
-def _templatesForModel(db, mid, flds):
+    def _templatesForModel(self, mid, flds):
         import anki.models
+        db = self.db
         dconf = anki.models.defaultTemplate
         tmpls = []
         for c, row in enumerate(db.all("""
 select name, active, qformat, aformat, questionInAnswer,
 questionAlign, lastFontColour, allowEmptyAnswer, typeAnswer from cardModels
 where modelId = ?
 order by ordinal""", mid)):
             conf = dconf.copy()
             (conf['name'],
              conf['actv'],
              conf['qfmt'],
              conf['afmt'],
              conf['hideQ'],
              conf['align'],
              conf['bg'],
              conf['emptyAns'],
              conf['typeAns']) = row
             conf['ord'] = c
             # convert the field name to an ordinal
             ordN = None
             for (ord, fm) in enumerate(flds):
                 if fm['name'] == conf['typeAns']:
                     ordN = ord
                     break
             if ordN is not None:
                 conf['typeAns'] = ordN
             else:
                 conf['typeAns'] = None
             for type in ("qfmt", "afmt"):
                 # ensure the new style field format
                 conf[type] = re.sub("%\((.+?)\)s", "{{\\1}}", conf[type])
                 # some special names have changed
                 conf[type] = re.sub(
                     "(?i){{tags}}", "{{Tags}}", conf[type])
                 conf[type] = re.sub(
                     "(?i){{cardModel}}", "{{Template}}", conf[type])
                 conf[type] = re.sub(
                     "(?i){{modelTags}}", "{{Model}}", conf[type])
             tmpls.append(conf)
         return tmpls
 
-def _postSchemaUpgrade(deck):
-    "Handle the rest of the upgrade to 2.0."
-    import anki.deck
+    # Upgrading deck
+    ######################################################################
+
+    def _upgradeDeck(self):
+        "Handle the rest of the upgrade to 2.0."
+        import anki.deck
+        deck = self.deck
         # make sure we have a current model id
         deck.models.setCurrent(deck.models.models.values()[0])
         # regenerate css, and set new card order
         for m in deck.models.all():
             m['newOrder'] = deck.conf['oldNewOrder']
             deck.models.save(m)
         del deck.conf['oldNewOrder']
         # fix creation time
         deck.sched._updateCutoff()
         d = datetime.datetime.today()
         d -= datetime.timedelta(hours=4)
         d = datetime.datetime(d.year, d.month, d.day)
         d += datetime.timedelta(hours=4)
         d -= datetime.timedelta(days=1+int((time.time()-deck.crt)/86400))
         deck.crt = int(time.mktime(d.timetuple()))
         deck.sched._updateCutoff()
         # update uniq cache
         deck.updateFieldCache(deck.db.list("select id from facts"))
         # remove old views
         for v in ("failedCards", "revCardsOld", "revCardsNew",
                   "revCardsDue", "revCardsRandom", "acqCardsRandom",
                   "acqCardsOld", "acqCardsNew"):
             deck.db.execute("drop view if exists %s" % v)
         # remove stats, as it's all in the revlog now
         deck.db.execute("drop table if exists stats")
         # suspended cards don't use ranges anymore
         deck.db.execute("update cards set queue=-1 where queue between -3 and -1")
         deck.db.execute("update cards set queue=-2 where queue between 3 and 5")
         deck.db.execute("update cards set queue=-3 where queue between 6 and 8")
         # remove old deleted tables
         for t in ("cards", "facts", "models", "media"):
             deck.db.execute("drop table if exists %sDeleted" % t)
         # rewrite due times for new cards
         deck.db.execute("""
 update cards set due = fid where type=0""")
         # and failed cards
         left = len(deck.groups.conf(1)['new']['delays'])
         deck.db.execute("update cards set edue = ?, left=? where type = 1",
                         deck.sched.today+1, left)
         # and due cards
         deck.db.execute("""
 update cards set due = cast(
 (case when due < :stamp then 0 else 1 end) +
 ((due-:stamp)/86400) as int)+:today where type = 2
 """, stamp=deck.sched.dayCutoff, today=deck.sched.today)
         # possibly re-randomize
         if deck.models.randomNew():
             deck.sched.randomizeCards()
         # update insertion id
         deck.conf['nextPos'] = deck.db.scalar("select max(id) from facts")+1
         deck.save()
         # optimize and finish
         deck.db.commit()
         deck.db.execute("vacuum")
         deck.db.execute("analyze")
-    deck.db.execute("update deck set ver = ?", CURRENT_VERSION)
+        deck.db.execute("update deck set ver = ?", SCHEMA_VERSION)
         deck.save()
-
-# Post-init upgrade
-######################################################################
-
-def _upgradeDeck(deck, version):
-    "Upgrade deck to the latest version."
-    if version >= CURRENT_VERSION:
-        return
-    if version < 100:
-        _postSchemaUpgrade(deck)

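Note: the comment block at the top of the new upgrader spells out the intended flow: run the migration checker on the original path first, then hand the path to Upgrader, which copies the file to a temporary location before touching it and returns an open _Deck. A minimal sketch of that flow, mirroring test_upgrade further down (the path value is a placeholder):

from anki.migration.checker import check
from anki.migration.upgrader import Upgrader

path = u"collection-1.2.anki"  # placeholder path to a v1.2 deck file
if check(path):
    # upgrade() works on a temp copy made by _openDB(), so the original
    # file is left untouched; the return value is an open _Deck
    deck = Upgrader().upgrade(path)
    deck.reset()  # rebuild the queues before querying the scheduler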

@@ -2,13 +2,12 @@
 # Copyright: Damien Elmes <anki@ichi2.net>
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-CURRENT_VERSION = 1
-
 import os, simplejson
 from anki.lang import _
 from anki.utils import intTime
 from anki.db import DB
 from anki.deck import _Deck
+from anki.consts import *
 from anki.stdmodels import addBasicModel, addClozeModel
 
 def Deck(path, queue=True, lock=True, server=False):
@@ -30,7 +29,7 @@ def Deck(path, queue=True, lock=True, server=False):
     db.execute("pragma cache_size = 10000")
     # add db to deck and do any remaining upgrades
     deck = _Deck(db, server)
-    if ver < CURRENT_VERSION:
+    if ver < SCHEMA_VERSION:
         _upgradeDeck(deck, ver)
     elif create:
         # add in reverse order so basic is default
@@ -47,7 +46,7 @@ def Deck(path, queue=True, lock=True, server=False):
 # no upgrades necessary at the moment
 
 def _upgradeSchema(db):
-    return CURRENT_VERSION
+    return SCHEMA_VERSION
 
 def _upgradeDeck(deck, ver):
     return
@@ -61,7 +60,7 @@ def _createDB(db):
     _addSchema(db)
     _updateIndices(db)
     db.execute("analyze")
-    return CURRENT_VERSION
+    return SCHEMA_VERSION
 
 def _addSchema(db, setDeckConf=True):
     db.executescript("""
@@ -140,7 +139,7 @@ create table if not exists graves (
 insert or ignore into deck
 values(1,0,0,0,%(v)s,0,0,0,'','{}','','','{}');
-""" % ({'v':CURRENT_VERSION}))
+""" % ({'v':SCHEMA_VERSION}))
     import anki.deck
     if setDeckConf:
         _addDeckVars(db, *_getDeckVars(db))

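Note: with CURRENT_VERSION gone from storage.py, the open path above and the upgrader now compare against the single SCHEMA_VERSION defined in anki.consts. A rough sketch of how a caller could check whether an already-converted file needs any further schema upgrade, using only calls that appear in this diff (the helper name is illustrative, not part of the codebase):

from anki.consts import SCHEMA_VERSION
from anki.db import DB

def needsUpgrade(path):
    # illustrative helper: the ver column is written by _addSchema() and
    # bumped at the end of an upgrade, so a value below the shared constant
    # means _upgradeDeck() still has work to do
    db = DB(path)
    return db.scalar("select ver from deck") < SCHEMA_VERSION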

@@ -116,19 +116,6 @@ def test_fieldChecksum():
     assert deck.db.scalar(
         "select count() from fsums") == 2
 
-def test_upgrade():
-    dst = getUpgradeDeckPath()
-    print "upgrade to", dst
-    deck = Deck(dst)
-    # creation time should have been adjusted
-    d = datetime.datetime.fromtimestamp(deck.crt)
-    assert d.hour == 4 and d.minute == 0
-    # 3 new, 2 failed, 1 due
-    deck.conf['counts'] = COUNT_REMAINING
-    assert deck.sched.cardCounts() == (3,2,1)
-    # now's a good time to test the integrity check too
-    deck.fixIntegrity()
-
 def test_selective():
     deck = getEmptyDeck()
     f = deck.newFact()


@@ -1,7 +1,10 @@
 # coding: utf-8
 
+import datetime
+from anki.consts import *
 from shared import getUpgradeDeckPath
 from anki.migration.checker import check
+from anki.migration.upgrader import Upgrader
 
 def test_checker():
     dst = getUpgradeDeckPath()
@@ -10,3 +13,20 @@ def test_checker():
     open(dst, "w+").write("foo")
     assert not check(dst)
+
+def test_upgrade():
+    dst = getUpgradeDeckPath()
+    u = Upgrader()
+    print "upgrade to", dst
+    deck = u.upgrade(dst)
+    # creation time should have been adjusted
+    d = datetime.datetime.fromtimestamp(deck.crt)
+    assert d.hour == 4 and d.minute == 0
+    # 3 new, 2 failed, 1 due
+    deck.reset()
+    deck.conf['counts'] = COUNT_REMAINING
+    assert deck.sched.cardCounts() == (3,2,1)
+    # now's a good time to test the integrity check too
+    deck.fixIntegrity()
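
Note: the hour == 4 assertion follows from the creation-time fix in the upgrader's _upgradeDeck() above: deck.crt is snapped back to a 4 AM day boundary and shifted by the number of days the deck has existed. A condensed, standalone restatement of that arithmetic (the starting crt value is made up for illustration):

import datetime, time

crt = time.time() - 10 * 86400  # made-up example: deck created roughly 10 days ago
d = datetime.datetime.today()
d -= datetime.timedelta(hours=4)         # treat 4 AM as the start of the day
d = datetime.datetime(d.year, d.month, d.day)
d += datetime.timedelta(hours=4)         # today's 4 AM boundary
d -= datetime.timedelta(days=1 + int((time.time() - crt) / 86400))
crt = int(time.mktime(d.timetuple()))
# fromtimestamp(crt) now lands on a 4 AM boundary, which is what test_upgrade checks
assert datetime.datetime.fromtimestamp(crt).hour == 4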