From 2f2713370588220d2dd6eb81c4a4f47927a36d68 Mon Sep 17 00:00:00 2001
From: Damien Elmes
Date: Mon, 7 Mar 2011 02:04:04 +0900
Subject: [PATCH] drop sqlalchemy; massive refactor

SQLAlchemy is a great tool, but it wasn't a great fit for Anki:

- We often had to drop down to raw SQL for performance reasons.
- The DB cursors and results were wrapped, which incurred a sizable performance hit due to introspection. Operations like fetching 50k records from a hot cache were taking more than twice as long to complete.
- We take advantage of sqlite-specific features, so SQL language abstraction is useless to us.
- The anki schema is quite small, so manually saving and loading objects is not a big burden.

In the process of porting to DBAPI, I've refactored the database schema:

- App configuration data that we don't need in joins or bulk updates has been moved into JSON objects. This simplifies serializing, and means we won't need DB schema changes to store extra options in the future. This change obsoletes the deckVars table.
- Renamed tables:
  -- fieldModels -> fields
  -- cardModels -> templates
  -- fields -> fdata
- A number of attribute names have been shortened.

Classes like Card, Fact & Model remain. They maintain a reference to the deck. To write their state to the DB, call .flush(). Objects no longer have their modification time manually updated; instead, the modification time is updated when they are flushed. This also applies to the deck.

Decks will now save on close, because various operations that were done at deck load will be moved into deck close instead. Operations like unburying cards are cheap on a hot cache, but expensive on startup. Programmatically you can call .close(save=False) to avoid a save and a modification bump; this will be useful for generating due counts. Because of the new saving behaviour, the save and save-as options will be removed from the GUI in the future.

The q/a cache and field cache generation has been centralized. Facts will automatically rebuild the cache on flush; models can do so with model.updateCache().

Media handling has also been reworked. It has moved into a MediaRegistry object, which the deck holds. Refcounting has been dropped - it meant we had to compare old and new values every time facts or models were changed, and existed for the sole purpose of not showing errors on a missing media download. Instead we just call media.registerText(q+a) when it's updated. The download function will be expanded to ask the user if they want to continue after a certain number of files have failed to download, which should be an adequate alternative. And we now add the file to the media DB when it's copied to the media directory, not when the card is committed. This fixes the duplicates a user would get if they added the same media to a card twice without adding the card.

The old DeckStorage object had its upgrade code split out in a previous commit; the opening and upgrading code has been merged back together and put in a separate storage.py file. The correct way to open a deck now is: import anki; d = anki.Deck(path).

deck.getCard() -> deck.sched.getCard(); the same applies to answerCard(). deck.getCard(id) now returns a Card object.
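To make the above concrete, here is a minimal sketch of the new flow (the deck path, ease value and edit are placeholders; the calls mirror the updated module docstring in anki/__init__.py further down):

    import anki

    deck = anki.Deck("/path/to/deck.anki")   # opening now goes through anki.storage
    card = deck.sched.getCard()              # scheduling lives on deck.sched
    if card:
        print card.q, card.a                 # q/a are cached columns on the card
        deck.sched.answerCard(card, 2)       # 2 = example ease value
        fact = card.fact()
        for (name, value) in fact.items():
            fact[name] = value + " (edited)"
        fact.flush()                         # bumps mod time and rebuilds the q/a cache
    deck.close()                             # saves by default; close(save=False) skips it
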
And the DB wrapper has had a few changes: - sql statements are a more standard DBAPI: - statement() -> execute() - statements() -> executemany() - called like execute(sql, 1, 2, 3) or execute(sql, a=1, b=2, c=3) - column0 -> list --- anki/__init__.py | 43 +- anki/cards.py | 223 +-- anki/consts.py | 44 +- anki/db.py | 175 +- anki/deck.py | 1511 ++++++----------- anki/errors.py | 38 +- anki/exporting.py | 3 +- anki/facts.py | 238 ++- anki/find.py | 86 +- anki/graphs.py | 4 +- anki/graves.py | 17 +- anki/groups.py | 20 - anki/importing/__init__.py | 6 +- anki/importing/anki10.py | 8 +- anki/importing/dingsbums.py | 6 +- anki/importing/supermemo_xml.py | 2 +- anki/media.py | 469 ++--- anki/models.py | 349 ++-- anki/revlog.py | 15 +- anki/sched.py | 55 +- anki/stats.py | 1 - anki/stdmodels.py | 68 +- anki/{upgrade.py => storage.py} | 293 +++- anki/sync.py | 27 +- anki/utils.py | 7 +- tests/deck/fake.png | 1 - tests/importing/dingsbums.xml | 50 - tests/importing/supermemo1.xml | 89 - tests/importing/test.mem | 219 --- tests/importing/test10-2.anki | Bin 180224 -> 0 bytes tests/importing/test10-3.anki | Bin 180224 -> 0 bytes tests/importing/test10.anki | Bin 180224 -> 0 bytes tests/importing/text-2fields.txt | 11 - tests/importing/text-tags.txt | 2 - tests/importing/text-update.txt | 11 - tests/shared.py | 4 +- tests/syncing/media-tests/1.anki | Bin 180224 -> 0 bytes .../834a227f8d0abc4e2193f08138e59885.png | Bin 545 -> 0 bytes .../c4ad64a7afe9b09602cdf91e18897959.png | Bin 580 -> 0 bytes tests/syncing/media-tests/2.anki | Bin 184320 -> 0 bytes .../22161b29b0c18e068038021f54eee1ee.png | Bin 644 -> 0 bytes tests/test_deck.py | 312 +--- tests/test_media.py | 89 +- tests/test_sched.py | 30 +- 44 files changed, 1689 insertions(+), 2837 deletions(-) rename anki/{upgrade.py => storage.py} (51%) delete mode 100644 tests/deck/fake.png delete mode 100644 tests/importing/dingsbums.xml delete mode 100644 tests/importing/supermemo1.xml delete mode 100644 tests/importing/test.mem delete mode 100644 tests/importing/test10-2.anki delete mode 100644 tests/importing/test10-3.anki delete mode 100644 tests/importing/test10.anki delete mode 100644 tests/importing/text-2fields.txt delete mode 100644 tests/importing/text-tags.txt delete mode 100644 tests/importing/text-update.txt delete mode 100644 tests/syncing/media-tests/1.anki delete mode 100644 tests/syncing/media-tests/1.media/834a227f8d0abc4e2193f08138e59885.png delete mode 100644 tests/syncing/media-tests/1.media/c4ad64a7afe9b09602cdf91e18897959.png delete mode 100644 tests/syncing/media-tests/2.anki delete mode 100644 tests/syncing/media-tests/2.media/22161b29b0c18e068038021f54eee1ee.png diff --git a/anki/__init__.py b/anki/__init__.py index 3a9b51b9d..cdc14deea 100644 --- a/anki/__init__.py +++ b/anki/__init__.py @@ -3,58 +3,35 @@ # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html """\ -Anki (libanki) -==================== - Open a deck: deck = anki.Deck(path) -Get a card: +Get a due card: - card = deck.getCard() + card = deck.sched.getCard() if not card: # deck is finished Show the card: - print card.question, card.answer + print card.q, card.a Answer the card: - deck.answerCard(card, ease) + deck.sched.answerCard(card, ease) Edit the card: - fields = card.fact.model.fieldModels - for field in fields: - card.fact[field.name] = 'newvalue' - card.fact.setModified(textChanged=True, deck=deck) - deck.setModified() - -Get all cards via ORM (slow): - - from anki.cards import Card - cards = deck.s.query(Card).all() - -Get all q/a/ids via 
SQL (fast): - - cards = deck.s.all("select id, question, answer from cards") + fact = card.fact() + for (name, value) in fact.items(): + fact[name] = value + " new" + fact.flush() Save & close: - deck.save() deck.close() """ -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - pass - -version = "1.2.8" - -from anki.deck import DeckStorage - -def Deck(*args, **kwargs): - return DeckStorage.Deck(*args, **kwargs) +version = "1.2.6" +from anki.storage import Deck diff --git a/anki/cards.py b/anki/cards.py index f7c8a9ad1..a287c3c11 100644 --- a/anki/cards.py +++ b/anki/cards.py @@ -2,12 +2,8 @@ # Copyright: Damien Elmes # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html -import time, sys, math, random -from anki.db import * -from anki.models import CardModel, Model, FieldModel, formatQA -from anki.facts import Fact, factsTable, Field -from anki.utils import parseTags, findTag, stripHTML, genID, hexifyID, intTime -from anki.media import updateMediaCount, mediaFiles +import time +from anki.utils import genID, intTime, hexifyID MAX_TIMER = 60 @@ -26,57 +22,89 @@ MAX_TIMER = 60 # - rev queue: integer day # - lrn queue: integer timestamp -cardsTable = Table( - 'cards', metadata, - Column('id', Integer, primary_key=True), - Column('factId', Integer, ForeignKey("facts.id"), nullable=False), - Column('groupId', Integer, nullable=False, default=1), - Column('cardModelId', Integer, ForeignKey("cardModels.id"), nullable=False), - Column('modified', Integer, nullable=False, default=intTime), - # general - Column('question', UnicodeText, nullable=False, default=u""), - Column('answer', UnicodeText, nullable=False, default=u""), - Column('ordinal', Integer, nullable=False), - Column('flags', Integer, nullable=False, default=0), - # shared scheduling - Column('type', Integer, nullable=False, default=2), - Column('queue', Integer, nullable=False, default=2), - Column('due', Integer, nullable=False), - # sm2 - Column('interval', Integer, nullable=False, default=0), - Column('factor', Integer, nullable=False), - Column('reps', Integer, nullable=False, default=0), - Column('streak', Integer, nullable=False, default=0), - Column('lapses', Integer, nullable=False, default=0), - # learn - Column('grade', Integer, nullable=False, default=0), - Column('cycles', Integer, nullable=False, default=0) -) - class Card(object): - # called one of three ways: - # - with no args, followed by .fromDB() - # - with all args, when adding cards to db - def __init__(self, fact=None, cardModel=None, group=None): - # timer - self.timerStarted = None - if fact: + def __init__(self, deck, id=None): + self.deck = deck + if id: + self.id = id + self.load() + else: + # to flush, set fid, tid, due and ord self.id = genID() - self.modified = intTime() - self.due = fact.pos - self.fact = fact - self.modelId = fact.modelId - self.cardModel = cardModel - self.groupId = group.id - # placeholder; will get set properly when card graduates - self.factor = 2500 - # for non-orm use - self.cardModelId = cardModel.id - self.ordinal = cardModel.ordinal + self.gid = 1 + self.q = "" + self.a = "" + self.flags = 0 + self.type = 2 + self.queue = 2 + self.interval = 0 + self.factor = 0 + self.reps = 0 + self.streak = 0 + self.lapses = 0 + self.grade = 0 + self.cycles = 0 + self.timerStarted = None - def setModified(self): - self.modified = intTime() + def load(self): + (self.id, + self.fid, + self.tid, + self.gid, + self.mod, + self.q, + self.a, + self.ord, + self.type, + self.queue, + self.due, + 
self.interval, + self.factor, + self.reps, + self.streak, + self.lapses, + self.grade, + self.cycles) = self.deck.db.first( + "select * from cards where id = ?", self.id) + + def flush(self): + self.mod = intTime() + self.deck.db.execute( + """ +insert or replace into cards values +(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", + self.id, + self.fid, + self.tid, + self.gid, + self.mod, + self.q, + self.a, + self.ord, + self.type, + self.queue, + self.due, + self.interval, + self.factor, + self.reps, + self.streak, + self.lapses, + self.grade, + self.cycles) + + def flushSched(self): + self.mod = intTime() + self.deck.db.execute( + """update cards set +mod=?, type=?, queue=?, due=?, interval=?, factor=?, reps=?, +streak=?, lapses=?, grade=?, cycles=? where id = ?""", + self.mod, self.type, self.queue, self.due, self.interval, + self.factor, self.reps, self.streak, self.lapses, + self.grade, self.cycles, self.id) + + def fact(self): + return self.deck.getFact(self.deck, self.fid) def startTimer(self): self.timerStarted = time.time() @@ -87,40 +115,9 @@ class Card(object): # Questions and answers ########################################################################## - def rebuildQA(self, deck, media=True): - # format qa - d = {} - for f in self.fact.model.fieldModels: - d[f.name] = (f.id, self.fact[f.name]) - qa = formatQA(None, self.fact.modelId, d, self._splitTags(), - self.cardModel, deck) - # find old media references - files = {} - for type in ("question", "answer"): - for f in mediaFiles(getattr(self, type) or ""): - if f in files: - files[f] -= 1 - else: - files[f] = -1 - # update q/a - self.question = qa['question'] - self.answer = qa['answer'] - # determine media delta - for type in ("question", "answer"): - for f in mediaFiles(getattr(self, type)): - if f in files: - files[f] += 1 - else: - files[f] = 1 - # update media counts if we're attached to deck - if media: - for (f, cnt) in files.items(): - updateMediaCount(deck, f, cnt) - self.setModified() - def htmlQuestion(self, type="question", align=True): div = '''
%s
''' % ( - type[0], type[0], hexifyID(self.cardModelId), + type[0], type[0], hexifyID(self.tid), getattr(self, type)) # add outer div & alignment (with tables due to qt's html handling) if not align: @@ -137,59 +134,3 @@ class Card(object): def htmlAnswer(self, align=True): return self.htmlQuestion(type="answer", align=align) - - def _splitTags(self): - return (self.fact._tags, self.fact.model.name, self.cardModel.name) - - # Non-ORM - ########################################################################## - - def fromDB(self, s, id): - r = s.first("""select * from cards where id = :id""", id=id) - if not r: - return - (self.id, - self.factId, - self.groupId, - self.cardModelId, - self.modified, - self.question, - self.answer, - self.ordinal, - self.flags, - self.type, - self.queue, - self.due, - self.interval, - self.factor, - self.reps, - self.streak, - self.lapses, - self.grade, - self.cycles) = r - return True - - def toDB(self, s): - # this shouldn't be used for schema changes - s.execute("""update cards set -modified=:modified, -question=:question, -answer=:answer, -flags=:flags, -type=:type, -queue=:queue, -due=:due, -interval=:interval, -factor=:factor, -reps=:reps, -streak=:streak, -lapses=:lapses, -grade=:grade, -cycles=:cycles -where id=:id""", self.__dict__) - -mapper(Card, cardsTable, properties={ - 'cardModel': relation(CardModel), - 'fact': relation(Fact, backref="cards", primaryjoin= - cardsTable.c.factId == factsTable.c.id), - }) diff --git a/anki/consts.py b/anki/consts.py index a5c9aa6f1..e4bebc4de 100644 --- a/anki/consts.py +++ b/anki/consts.py @@ -14,10 +14,52 @@ NEW_CARDS_RANDOM = 0 NEW_CARDS_DUE = 1 # sort order for day's new cards -NEW_TODAY_ORDINAL = 0 +NEW_TODAY_ORD = 0 NEW_TODAY_DUE = 1 # review card sort order REV_CARDS_OLD_FIRST = 0 REV_CARDS_NEW_FIRST = 1 REV_CARDS_RANDOM = 2 + +# Labels +########################################################################## + +def newCardOrderLabels(): + return { + 0: _("Add new cards in random order"), + 1: _("Add new cards to end of queue"), + } + +def newCardSchedulingLabels(): + return { + 0: _("Spread new cards out through reviews"), + 1: _("Show new cards after all other cards"), + 2: _("Show new cards before reviews"), + } + +# FIXME: order due is not very useful anymore +def revCardOrderLabels(): + return { + 0: _("Review cards from largest interval"), + 1: _("Review cards from smallest interval"), + 2: _("Review cards in order due"), + 3: _("Review cards in random order"), + } + +def failedCardOptionLabels(): + return { + 0: _("Show failed cards soon"), + 1: _("Show failed cards at end"), + 2: _("Show failed cards in 10 minutes"), + 3: _("Show failed cards in 8 hours"), + 4: _("Show failed cards in 3 days"), + 5: _("Custom failed cards handling"), + } + +def alignmentLabels(): + return { + 0: _("Center"), + 1: _("Left"), + 2: _("Right"), + } diff --git a/anki/db.py b/anki/db.py index 873f062fc..3a14fc31c 100644 --- a/anki/db.py +++ b/anki/db.py @@ -2,18 +2,6 @@ # Copyright: Damien Elmes # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html -"""\ -DB tools -==================== - -SessionHelper is a wrapper for the standard sqlalchemy session, which provides -some convenience routines, and manages transactions itself. - -object_session() is a replacement for the standard object_session(), which -provides the features of SessionHelper, and avoids taking out another -transaction. 
-""" - try: from pysqlite2 import dbapi2 as sqlite except ImportError: @@ -22,128 +10,63 @@ except ImportError: except: raise Exception("Please install pysqlite2 or python2.5") -from sqlalchemy import (Table, Integer, Float, Column, MetaData, - ForeignKey, Boolean, String, Date, - UniqueConstraint, Index, PrimaryKeyConstraint) -from sqlalchemy import create_engine -from sqlalchemy.orm import mapper, sessionmaker as _sessionmaker, relation, backref, \ - object_session as _object_session, class_mapper -from sqlalchemy.sql import select, text, and_ -from sqlalchemy.exceptions import DBAPIError, OperationalError -from sqlalchemy.pool import NullPool -import sqlalchemy - -# some users are still on 0.4.x.. -import warnings -warnings.filterwarnings('ignore', 'Use session.add()') -warnings.filterwarnings('ignore', 'Use session.expunge_all()') - -# sqlalchemy didn't handle the move to unicodetext nicely -try: - from sqlalchemy import UnicodeText -except ImportError: - from sqlalchemy import Unicode - UnicodeText = Unicode - from anki.hooks import runHook +#FIXME: do we need the dbFinished hook? -# shared metadata -metadata = MetaData() +class DB(object): + def __init__(self, path, level="EXCLUSIVE"): + self._db = sqlite.connect( + path, timeout=0, isolation_level=level) + self._path = path + self.echo = False -# this class assumes the provided session is called with transactional=False -class SessionHelper(object): - "Add some convenience routines to a session." - - def __init__(self, session, lock=True, transaction=True): - self._session = session - self._lock = lock - self._transaction = transaction - if self._transaction: - self._session.begin() - if self._lock: - self._lockDB() - self._seen = True - - def save(self, obj): - # compat - if sqlalchemy.__version__.startswith("0.4."): - self._session.save(obj) + def execute(self, sql, *a, **ka): + if self.echo: + print sql, a, ka + if ka: + # execute("...where id = :id", id=5) + res = self._db.execute(sql, ka) else: - self._session.add(obj) - - def expunge_all(self): - # compat - if sqlalchemy.__version__.startswith("0.4."): - self._session.clear() - else: - self._session.expunge_all() - - def update(self, obj): - # compat - if sqlalchemy.__version__.startswith("0.4."): - self._session.update(obj) - else: - self._session.add(obj) - - def execute(self, *a, **ka): - x = self._session.execute(*a, **ka) + # execute("...where id = ?", 5) + res = self._db.execute(sql, a) runHook("dbFinished") - return x + return res - def __getattr__(self, k): - return getattr(self.__dict__['_session'], k) - - def scalar(self, sql, **args): - return self.execute(text(sql), args).scalar() - - def all(self, sql, **args): - return self.execute(text(sql), args).fetchall() - - def first(self, sql, **args): - c = self.execute(text(sql), args) - r = c.fetchone() - c.close() - return r - - def column0(self, sql, **args): - return [x[0] for x in self.execute(text(sql), args).fetchall()] - - def statement(self, sql, **kwargs): - "Execute a statement without returning any results. Flush first." - return self.execute(text(sql), kwargs) - - def statements(self, sql, data): - "Execute a statement across data. Flush first." 
- return self.execute(text(sql), data) - - def __repr__(self): - return repr(self._session) + def executemany(self, sql, l): + if self.echo: + print sql, l + self._db.executemany(sql, l) + runHook("dbFinished") def commit(self): - self._session.commit() - if self._transaction: - self._session.begin() - if self._lock: - self._lockDB() + self._db.commit() - def _lockDB(self): - "Take out a write lock." - self._session.execute("pragma locking_mode = exclusive") - self._session.execute(text("update deck set modified=modified")) + def scalar(self, *a, **kw): + res = self.execute(*a, **kw).fetchone() + if res: + return res[0] + return None -def object_session(*args): - s = _object_session(*args) - if s: - return SessionHelper(s, lock=False, transaction=False) - return None + def all(self, *a, **kw): + return self.execute(*a, **kw).fetchall() -def sessionmaker(*args, **kwargs): - if sqlalchemy.__version__ < "0.5": - if 'autocommit' in kwargs: - kwargs['transactional'] = not kwargs['autocommit'] - del kwargs['autocommit'] - else: - if 'transactional' in kwargs: - kwargs['autocommit'] = not kwargs['transactional'] - del kwargs['transactional'] - return _sessionmaker(*args, **kwargs) + def first(self, *a, **kw): + c = self.execute(*a, **kw) + res = c.fetchone() + c.close() + return res + + def list(self, *a, **kw): + return [x[0] for x in self.execute(*a, **kw)] + + def executescript(self, sql): + if self.echo: + print sql + self._db.executescript(sql) + runHook("dbFinished") + + def rollback(self): + self._db.rollback() + + def close(self): + self._db.close() diff --git a/anki/deck.py b/anki/deck.py index 280a60421..9bafbc06e 100644 --- a/anki/deck.py +++ b/anki/deck.py @@ -4,29 +4,24 @@ import tempfile, time, os, random, sys, re, stat, shutil import types, traceback, simplejson, datetime - -from anki.db import * -from anki.lang import _, ngettext -from anki.errors import DeckAccessError -from anki.stdmodels import BasicModel -from anki.utils import parseTags, tidyHTML, genID, ids2str, hexifyID, \ - canonifyTags, joinTags, addTags, deleteTags, checksum, fieldChecksum, intTime -from anki.revlog import logReview -from anki.models import Model, CardModel, formatQA -from anki.fonts import toPlatformFont from operator import itemgetter from itertools import groupby -from anki.hooks import runHook, hookEmpty -from anki.template import render -from anki.media import updateMediaCount, mediaFiles, \ - rebuildMediaDir -from anki.upgrade import upgradeSchema, updateIndices, upgradeDeck, DECK_VERSION + +from anki.lang import _, ngettext +from anki.utils import parseTags, tidyHTML, genID, ids2str, hexifyID, \ + canonifyTags, joinTags, addTags, deleteTags, checksum, fieldChecksum, \ + stripHTML, intTime + +from anki.fonts import toPlatformFont +from anki.hooks import runHook, hookEmpty, runFilter + from anki.sched import Scheduler +from anki.media import MediaRegistry + from anki.consts import * import anki.latex # sets up hook -# ensure all the DB metadata in other files is loaded before proceeding -import anki.models, anki.facts, anki.cards, anki.media, anki.groups, anki.graves +import anki.cards, anki.facts, anki.models, anki.graves, anki.template # Settings related to queue building. These may be loaded without the rest of # the config to check due counts faster on mobile clients. 
@@ -35,7 +30,7 @@ defaultQconf = { 'newGroups': [], 'newPerDay': 20, 'newToday': [0, 0], # currentDay, count - 'newTodayOrder': NEW_TODAY_ORDINAL, + 'newTodayOrder': NEW_TODAY_ORD, 'newCardOrder': 1, 'newCardSpacing': NEW_CARDS_DISTRIBUTE, 'revCardOrder': REV_CARDS_RANDOM, @@ -62,37 +57,21 @@ defaultConf = { 'latexPost': "\\end{document}", } -# syncName: md5sum of current deck location, to detect if deck was moved or -# renamed. mobile clients can treat this as a simple boolean -deckTable = Table( - 'deck', metadata, - Column('id', Integer, nullable=False, primary_key=True), - Column('created', Integer, nullable=False, default=intTime), - Column('modified', Integer, nullable=False, default=intTime), - Column('schemaMod', Integer, nullable=False, default=intTime), - Column('version', Integer, nullable=False, default=DECK_VERSION), - Column('syncName', UnicodeText, nullable=False, default=u""), - Column('lastSync', Integer, nullable=False, default=0), - Column('utcOffset', Integer, nullable=False, default=-2), - Column('qconf', UnicodeText, nullable=False, default=unicode( - simplejson.dumps(defaultQconf))), - Column('config', UnicodeText, nullable=False, default=unicode( - simplejson.dumps(defaultConf))), - Column('data', UnicodeText, nullable=False, default=u"{}") -) - -class Deck(object): - "Top-level object. Manages facts, cards and scheduling information." +# this is initialized by storage.Deck +class _Deck(object): + # fixme: make configurable? factorFour = 1.3 - def _initVars(self): + def __init__(self, db): + self.db = db + self.path = db._path + self.load() if self.utcOffset == -2: # shared deck; reset timezone and creation date self.utcOffset = time.timezone + 60*60*4 - self.created = time.time() - self.mediaPrefix = "" - self.lastLoaded = time.time() + self.created = intTime() + self.mod = self.created self.undoEnabled = False self.sessionStartReps = 0 self.sessionStartTime = 0 @@ -100,20 +79,79 @@ class Deck(object): # counter for reps since deck open self.reps = 0 self.sched = Scheduler(self) + self.media = MediaRegistry(self) - def modifiedSinceSave(self): - return self.modified > self.lastLoaded + # DB-related + ########################################################################## + + def load(self): + (self.created, + self.mod, + self.schema, + self.syncName, + self.lastSync, + self.utcOffset, + self.qconf, + self.conf, + self.data) = self.db.first(""" +select created, mod, schema, syncName, lastSync, +utcOffset, qconf, conf, data from deck""") + self.qconf = simplejson.loads(self.qconf) + self.conf = simplejson.loads(self.conf) + self.data = simplejson.loads(self.data) + + def flush(self): + "Flush state to DB, updating mod time." + self.mod = intTime() + self.db.execute( + """update deck set +mod=?, schema=?, syncName=?, lastSync=?, utcOffset=?, +qconf=?, conf=?, data=?""", + self.mod, self.schema, self.syncName, self.lastSync, + self.utcOffset, simplejson.dumps(self.qconf), + simplejson.dumps(self.conf), simplejson.dumps(self.data)) + + def save(self): + "Flush, then commit DB." + self.flush() + self.db.commit() + + def close(self, save=True): + "Disconnect from DB." + if self.db: + if save: + self.save() + else: + self.rollback() + self.db.close() + self.db = None + runHook("deckClosed", self) + + def reopen(self): + "Reconnect to DB (after changing threads, etc). Doesn't reload." 
+ import anki.db + if not self.db: + self.db = anki.db.DB(self.path) + + def rollback(self): + self.db.rollback() + + def modSchema(self): + self.schema = intTime() + # next sync will be full, so we can forget old gravestones + anki.graves.forgetAll(self.db) + + # unsorted + ########################################################################## def reset(self): self.sched.reset() # recache css self.rebuildCSS() - def getCard(self): - return self.sched.getCard() + def getCard(self, id): + return anki.cards.Card(self, id) - def answerCard(self, card, ease): - self.sched.answerCard(card, ease) # if card: # return card # if sched.name == "main": @@ -129,7 +167,7 @@ class Deck(object): "Reset progress on cards in IDS." print "position in resetCards()" sql = """ -update cards set modified=:now, position=0, type=2, queue=2, lastInterval=0, +update cards set mod=:now, position=0, type=2, queue=2, lastInterval=0, interval=0, due=created, factor=2.5, reps=0, successive=0, lapses=0, flags=0""" sql2 = "delete from revlog" if ids is None: @@ -138,37 +176,35 @@ interval=0, due=created, factor=2.5, reps=0, successive=0, lapses=0, flags=0""" sids = ids2str(ids) sql += " where id in "+sids sql2 += " where cardId in "+sids - self.db.statement(sql, now=time.time()) - self.db.statement(sql2) + self.db.execute(sql, now=time.time()) + self.db.execute(sql2) if self.qconf['newCardOrder'] == NEW_CARDS_RANDOM: # we need to re-randomize now self.randomizeNewCards(ids) - self.flushMod() - self.refreshSession() def randomizeNewCards(self, cardIds=None): "Randomize 'due' on all new cards." now = time.time() - query = "select distinct factId from cards where reps = 0" + query = "select distinct fid from cards where reps = 0" if cardIds: query += " and id in %s" % ids2str(cardIds) - fids = self.db.column0(query) + fids = self.db.list(query) data = [{'fid': fid, 'rand': random.uniform(0, now), 'now': now} for fid in fids] - self.db.statements(""" + self.db.executemany(""" update cards -set due = :rand + ordinal, -modified = :now -where factId = :fid +set due = :rand + ord, +mod = :now +where fid = :fid and type = 2""", data) def orderNewCards(self): "Set 'due' to card creation time." - self.db.statement(""" + self.db.execute(""" update cards set due = created, -modified = :now +mod = :now where type = 2""", now=time.time()) def rescheduleCards(self, ids, min, max): @@ -183,7 +219,7 @@ where type = 2""", now=time.time()) 'int': r / 86400.0, 't': time.time(), }) - self.db.statements(""" + self.db.executemany(""" update cards set interval = :int, due = :due, @@ -194,7 +230,6 @@ firstAnswered = :t, queue = 1, type = 1, where id = :id""", vals) - self.flushMod() # Times ########################################################################## @@ -298,31 +333,28 @@ limit 1""" % self.delay0)) ########################################################################## def suspendCards(self, ids): - "Suspend cards. Caller must .reset()" + "Suspend cards." self.startProgress() - self.db.statement(""" + self.db.execute(""" update cards -set queue = -1, modified = :t +set queue = -1, mod = :t where id in %s""" % ids2str(ids), t=time.time()) - self.flushMod() self.finishProgress() def unsuspendCards(self, ids): - "Unsuspend cards. Caller must .reset()" + "Unsuspend cards." 
self.startProgress() - self.db.statement(""" -update cards set queue = type, modified=:t + self.db.execute(""" +update cards set queue = type, mod=:t where queue = -1 and id in %s""" % ids2str(ids), t=time.time()) - self.flushMod() self.finishProgress() def buryFact(self, fact): - "Bury all cards for fact until next session. Caller must .reset()" + "Bury all cards for fact until next session." for card in fact.cards: if card.queue in (0,1,2): card.queue = -2 - self.flushMod() # Counts ########################################################################## @@ -409,106 +441,88 @@ due > :now and due < :now""", now=time.time()) def factCount(self): return self.db.scalar("select count() from facts") - def newFact(self, model=None): + def newFact(self): "Return a new fact with the current model." - if model is None: - model = self.currentModel - return anki.facts.Fact(model, self.getFactPos()) + return anki.facts.Fact(self, self.currentModel()) - def addFact(self, fact, reset=True): - "Add a fact to the deck. Return list of new cards." - if not fact.model: - fact.model = self.currentModel - # validate - fact.assertValid() - fact.assertUnique(self.db) + def addFact(self, fact): + "Add a fact to the deck. Return number of new cards." # check we have card models available cms = self.availableCardModels(fact) if not cms: return None - # proceed - cards = [] - self.db.save(fact) - # update field cache - self.flushMod() + # set pos + fact.pos = self.conf['nextFactPos'] + self.conf['nextFactPos'] += 1 + ncards = 0 isRandom = self.qconf['newCardOrder'] == NEW_CARDS_RANDOM if isRandom: due = random.randrange(0, 10000) - for cardModel in cms: - group = self.groupForTemplate(cardModel) - card = anki.cards.Card(fact, cardModel, group) + for template in cms: + print "fixme:specify group on fact add" + group = self.groupForTemplate(template) + card = anki.cards.Card(self) + card.fid = fact.id + card.tid = template.id + card.ord = template.ord + card.gid = 1 #group.id if isRandom: card.due = due - self.flushMod() - cards.append(card) - # update card q/a - fact.setModified(True, self) - self.registerTags(fact.tags()) - self.flushMod() - if reset: - self.reset() - return fact + else: + card.due = fact.pos + card.flush() + ncards += 1 + # save fact last, which will update caches too + fact.flush() + self.registerTags(fact.tags) + return ncards def groupForTemplate(self, template): - print "add default group to template" - id = self.config['currentGroupId'] - return self.db.query(anki.groups.GroupConfig).get(id).load() + return 1 + id = self.conf['currentGroupId'] + return self.db.query(anki.groups.GroupConf).get(id).load() def availableCardModels(self, fact, checkActive=True): "List of active card models that aren't empty for FACT." 
- models = [] - for cardModel in fact.model.cardModels: - if cardModel.active or not checkActive: - ok = True - for (type, format) in [("q", cardModel.qformat), - ("a", cardModel.aformat)]: - # compat - format = re.sub("%\((.+?)\)s", "{{\\1}}", format) - empty = {} - local = {}; local.update(fact) - local['tags'] = u"" - local['Tags'] = u"" - local['cardModel'] = u"" - local['modelName'] = u"" - for k in local.keys(): - empty[k] = u"" - empty["text:"+k] = u"" - local["text:"+k] = local[k] - empty['tags'] = "" - local['tags'] = fact._tags - try: - if (render(format, local) == - render(format, empty)): - ok = False - break - except (KeyError, TypeError, ValueError): - ok = False - break - if ok or type == "a" and cardModel.allowEmptyAnswer: - models.append(cardModel) - return models + ok = [] + for template in fact.model.templates: + if template.active or not checkActive: + # [cid, fid, qfmt, afmt, tags, model, template, group] + meta = [None, template.qfmt, template.afmt, + "", "", "", ""] + fields = fact.fieldsWithIds() + now = self.formatQA(None, fields, meta, False) + for k in fields.keys(): + fields[k] = (fields[k][0], "") + empty = self.formatQA(None, fields, meta, False) + if now['q'] == empty['q']: + continue + if not template.conf['allowEmptyAns']: + if now['a'] == empty['a']: + continue + ok.append(template) + return ok - def addCards(self, fact, cardModelIds): - "Caller must flush first and flushMod after." + def addCards(self, fact, tids): ids = [] - for cardModel in self.availableCardModels(fact, False): - if cardModel.id not in cardModelIds: + for template in self.availableCardModels(fact, False): + if template.id not in tids: continue if self.db.scalar(""" select count(id) from cards -where factId = :fid and cardModelId = :cmid""", - fid=fact.id, cmid=cardModel.id) == 0: +where fid = :fid and tid = :cmid""", + fid=fact.id, cmid=template.id) == 0: # enough for 10 card models assuming 0.00001 timer precision card = anki.cards.Card( - fact, cardModel, - fact.created+0.0001*cardModel.ordinal) + fact, template, + fact.created+0.0001*template.ord) raise Exception("incorrect; not checking selective study") self.newAvail += 1 ids.append(card.id) if ids: - fact.setModified(textChanged=True, deck=self) - self.setModified() + fact.setMod(textChanged=True, deck=self) + self.setMod() return ids def factIsInvalid(self, fact): @@ -519,41 +533,37 @@ where factId = :fid and cardModelId = :cmid""", except FactInvalidError, e: return e - def factUseCount(self, factId): + def factUseCount(self, fid): "Return number of cards referencing a given fact id." - return self.db.scalar("select count(id) from cards where factId = :id", - id=factId) + return self.db.scalar("select count(id) from cards where fid = :id", + id=fid) - def deleteFact(self, factId): + def deleteFact(self, fid): "Delete a fact. Removes any associated cards. Don't flush." - self.db.flush() # remove any remaining cards - self.db.statement("insert into cardsDeleted select id, :time " - "from cards where factId = :factId", - time=time.time(), factId=factId) - self.db.statement( - "delete from cards where factId = :id", id=factId) + self.db.execute("insert into cardsDeleted select id, :time " + "from cards where fid = :fid", + time=time.time(), fid=fid) + self.db.execute( + "delete from cards where fid = :id", id=fid) # and then the fact - self.deleteFacts([factId]) - self.setModified() + self.deleteFacts([fid]) def deleteFacts(self, ids): - "Bulk delete facts by ID; don't touch cards. Caller must .reset()." 
+ "Bulk delete facts by ID; don't touch cards." if not ids: return - self.db.flush() now = time.time() strids = ids2str(ids) - self.db.statement("delete from facts where id in %s" % strids) - self.db.statement("delete from fields where factId in %s" % strids) + self.db.execute("delete from facts where id in %s" % strids) + self.db.execute("delete from fdata where fid in %s" % strids) anki.graves.registerMany(self.db, anki.graves.FACT, ids) - self.setModified() def deleteDanglingFacts(self): "Delete any facts without cards. Return deleted ids." - ids = self.db.column0(""" + ids = self.db.list(""" select facts.id from facts -where facts.id not in (select distinct factId from cards)""") +where facts.id not in (select distinct fid from cards)""") self.deleteFacts(ids) return ids @@ -567,17 +577,17 @@ where facts.id not in (select distinct factId from cards)""") fact = self.cloneFact(oldFact) # proceed cards = [] - for cardModel in cms: - card = anki.cards.Card(fact, cardModel) + for template in cms: + card = anki.cards.Card(fact, template) cards.append(card) - fact.setModified(textChanged=True, deck=self, media=False) + fact.setMod(textChanged=True, deck=self, media=False) return cards def cloneFact(self, oldFact): "Copy fact into new session." model = self.db.query(Model).get(oldFact.model.id) fact = self.newFact(model) - for field in fact.fields: + for field in fact.fdata: fact[field.name] = oldFact[field.name] fact._tags = oldFact._tags return fact @@ -593,79 +603,66 @@ where facts.id not in (select distinct factId from cards)""") self.deleteCards([id]) def deleteCards(self, ids): - "Bulk delete cards by ID. Caller must .reset()" + "Bulk delete cards by ID." if not ids: return - self.db.flush() now = time.time() strids = ids2str(ids) self.startProgress() # grab fact ids - factIds = self.db.column0("select factId from cards where id in %s" + fids = self.db.list("select fid from cards where id in %s" % strids) # drop from cards - self.db.statement("delete from cards where id in %s" % strids) + self.db.execute("delete from cards where id in %s" % strids) # note deleted anki.graves.registerMany(self.db, anki.graves.CARD, ids) # remove any dangling facts self.deleteDanglingFacts() - self.refreshSession() - self.flushMod() self.finishProgress() # Models ########################################################################## - def getCurrentModel(self): - return self.db.query(anki.models.Model).get(self.currentModelId) - def setCurrentModel(self, model): - self.currentModelId = model.id - currentModel = property(getCurrentModel, setCurrentModel) + def currentModel(self): + return self.getModel(self.conf['currentModelId']) - def getModels(self): - return self.db.query(anki.models.Model).all() - models = property(getModels) + def allModels(self): + return [self.getModel(id) for id in self.db.list( + "select id from models")] + + def getModel(self, mid): + return anki.models.Model(self, mid) def addModel(self, model): - self.db.add(model) - self.setSchemaModified() - self.currentModel = model - self.flushMod() + model.flush() + self.conf['currentModelId'] = model.id - def deleteModel(self, model): - "Delete MODEL, and all its cards/facts. Caller must .reset()." 
- if self.db.scalar("select count(id) from models where id=:id", - id=model.id): - self.setSchemaModified() - # delete facts/cards - self.currentModel - self.deleteCards(self.db.column0(""" -select cards.id from cards, facts where -facts.modelId = :id and -facts.id = cards.factId""", id=model.id)) - # then the model - self.models.remove(model) - self.db.delete(model) - self.db.flush() - if self.currentModel == model: - self.currentModel = self.models[0] - anki.graves.registerOne(self.db, anki.graves.MODEL, model.id) - self.flushMod() - self.refreshSession() - self.setModified() + def deleteModel(self, mid): + "Delete MODEL, and all its cards/facts." + self.modSchema() + # delete facts/cards + self.deleteCards(self.db.list(""" +select id from cards where fid in (select id from facts where mid = ?)""", + mid)) + # then the model + self.db.execute("delete from models where id = ?", mid) + self.db.execute("delete from templates where mid = ?", mid) + self.db.execute("delete from fields where mid = ?", mid) + anki.graves.registerOne(self.db, anki.graves.MODEL, mid) + # GUI should ensure last model is not deleted + if self.conf['currentModelId'] == mid: + self.conf['currentModelId'] = self.db.scalar( + "select id from models limit 1") def modelUseCount(self, model): "Return number of facts using model." - return self.db.scalar("select count(facts.modelId) from facts " - "where facts.modelId = :id", + return self.db.scalar("select count() from facts " + "where facts.mid = :id", id=model.id) - def deleteEmptyModels(self): - for model in self.models: - if not self.modelUseCount(model): - self.deleteModel(model) - def rebuildCSS(self): + print "fix rebuildCSS()" + return # css for all fields def _genCSS(prefix, row): (id, fam, siz, col, align, rtl, pre) = row @@ -687,122 +684,97 @@ facts.id = cards.factId""", id=model.id)) return t css = "".join([_genCSS(".fm", row) for row in self.db.all(""" select id, quizFontFamily, quizFontSize, quizFontColour, -1, - features, editFontFamily from fieldModels""")]) + features, editFontFamily from fields""")]) cardRows = self.db.all(""" -select id, null, null, null, questionAlign, 0, 0 from cardModels""") +select id, null, null, null, questionAlign, 0, 0 from templates""") css += "".join([_genCSS("#cmq", row) for row in cardRows]) css += "".join([_genCSS("#cma", row) for row in cardRows]) css += "".join([".cmb%s {background:%s;}\n" % (hexifyID(row[0]), row[1]) for row in self.db.all(""" -select id, lastFontColour from cardModels""")]) +select id, lastFontColour from templates""")]) self.css = css self.data['cssCache'] = css self.addHexCache() return css def addHexCache(self): - ids = self.db.column0(""" -select id from fieldModels union -select id from cardModels union + ids = self.db.list(""" +select id from fields union +select id from templates union select id from models""") cache = {} for id in ids: cache[id] = hexifyID(id) self.data['hexCache'] = cache - def copyModel(self, oldModel): - "Add a new model to DB based on MODEL." 
- m = Model(_("%s copy") % oldModel.name) - for f in oldModel.fieldModels: - f = f.copy() - m.addFieldModel(f) - for c in oldModel.cardModels: - c = c.copy() - m.addCardModel(c) - self.addModel(m) - return m - - def changeModel(self, factIds, newModel, fieldMap, cardMap): - "Caller must .reset()" - self.setSchemaModified() - self.db.flush() - fids = ids2str(factIds) - changed = False + def changeModel(self, fids, newModel, fieldMap, cardMap): + self.modSchema() + sfids = ids2str(fids) + self.startProgress() # field remapping if fieldMap: - changed = True - self.startProgress(len(fieldMap)+2) seen = {} for (old, new) in fieldMap.items(): - self.updateProgress(_("Changing fields...")) seen[new] = 1 if new: # can rename - self.db.statement(""" -update fields set -fieldModelId = :new, -ordinal = :ord -where fieldModelId = :old -and factId in %s""" % fids, new=new.id, ord=new.ordinal, old=old.id) + self.db.execute(""" +update fdata set +fmid = :new, +ord = :ord +where fmid = :old +and fid in %s""" % sfids, new=new.id, ord=new.ord, old=old.id) else: # no longer used - self.db.statement(""" -delete from fields where factId in %s -and fieldModelId = :id""" % fids, id=old.id) + self.db.execute(""" +delete from fdata where fid in %s +and fmid = :id""" % sfids, id=old.id) # new - for field in newModel.fieldModels: - self.updateProgress() + for field in newModel.fields: if field not in seen: d = [{'id': genID(), 'fid': f, 'fmid': field.id, - 'ord': field.ordinal} - for f in factIds] - self.db.statements(''' -insert into fields -(id, factId, fieldModelId, ordinal, value) + 'ord': field.ord} + for f in fids] + self.db.executemany(''' +insert into fdata +(id, fid, fmid, ord, value) values (:id, :fid, :fmid, :ord, "")''', d) # fact modtime - self.updateProgress() - self.db.statement(""" + self.db.execute(""" update facts set -modified = :t, -modelId = :id -where id in %s""" % fids, t=time.time(), id=newModel.id) +mod = :t, +mid = :id +where id in %s""" % sfids, t=time.time(), id=newModel.id) self.finishProgress() # template remapping self.startProgress(len(cardMap)+3) toChange = [] - self.updateProgress(_("Changing cards...")) for (old, new) in cardMap.items(): if not new: # delete - self.db.statement(""" + self.db.execute(""" delete from cards -where cardModelId = :cid and -factId in %s""" % fids, cid=old.id) +where tid = :cid and +fid in %s""" % sfids, cid=old.id) elif old != new: # gather ids so we can rename x->y and y->x - ids = self.db.column0(""" + ids = self.db.list(""" select id from cards where -cardModelId = :id and factId in %s""" % fids, id=old.id) +tid = :id and fid in %s""" % sfids, id=old.id) toChange.append((new, ids)) for (new, ids) in toChange: - self.updateProgress() - self.db.statement(""" + self.db.execute(""" update cards set -cardModelId = :new, -ordinal = :ord -where id in %s""" % ids2str(ids), new=new.id, ord=new.ordinal) - self.updateProgress() - self.updateCardQACacheFromIds(factIds, type="facts") - self.flushMod() - self.updateProgress() - cardIds = self.db.column0( - "select id from cards where factId in %s" % - ids2str(factIds)) - self.refreshSession() +tid = :new, +ord = :ord +where id in %s""" % ids2str(ids), new=new.id, ord=new.ord) + self.updateCache(fids, type="fact") + cardIds = self.db.list( + "select id from cards where fid in %s" % + ids2str(fids)) self.finishProgress() # Fields @@ -810,51 +782,48 @@ where id in %s""" % ids2str(ids), new=new.id, ord=new.ordinal) def allFields(self): "Return a list of all possible fields across all models." 
- return self.db.column0("select distinct name from fieldmodels") + return self.db.list("select distinct name from fieldmodels") def deleteFieldModel(self, model, field): self.startProgress() - self.setSchemaModified() - self.db.statement("delete from fields where fieldModelId = :id", + self.modSchema() + self.db.execute("delete from fdata where fmid = :id", id=field.id) - self.db.statement("update facts set modified = :t where modelId = :id", + self.db.execute("update facts set mod = :t where mid = :id", id=model.id, t=time.time()) - model.fieldModels.remove(field) + model.fields.remove(field) # update q/a formats - for cm in model.cardModels: + for cm in model.templates: types = ("%%(%s)s" % field.name, "%%(text:%s)s" % field.name, # new style "<<%s>>" % field.name, "<>" % field.name) for t in types: - for fmt in ('qformat', 'aformat'): + for fmt in ('qfmt', 'afmt'): setattr(cm, fmt, getattr(cm, fmt).replace(t, "")) self.updateCardsFromModel(model) - model.setModified() - self.flushMod() + model.flush() self.finishProgress() def addFieldModel(self, model, field): "Add FIELD to MODEL and update cards." - self.setSchemaModified() + self.modSchema() model.addFieldModel(field) - # commit field to disk - self.db.flush() - self.db.statement(""" -insert into fields (factId, fieldModelId, ordinal, value) -select facts.id, :fmid, :ordinal, "" from facts -where facts.modelId = :mid""", fmid=field.id, mid=model.id, ordinal=field.ordinal) + # flush field to disk + self.db.execute(""" +insert into fdata (fid, fmid, ord, value) +select facts.id, :fmid, :ord, "" from facts +where facts.mid = :mid""", fmid=field.id, mid=model.id, ord=field.ord) # ensure facts are marked updated - self.db.statement(""" -update facts set modified = :t where modelId = :mid""" + self.db.execute(""" +update facts set mod = :t where mid = :mid""" , t=time.time(), mid=model.id) - model.setModified() - self.flushMod() + model.flush() def renameFieldModel(self, model, field, newName): "Change FIELD's name in MODEL and update FIELD in all facts." - for cm in model.cardModels: + for cm in model.templates: types = ("%%(%s)s", "%%(text:%s)s", # new styles @@ -864,259 +833,208 @@ update facts set modified = :t where modelId = :mid""" "{{^%s}}", "{{/%s}}") for t in types: - for fmt in ('qformat', 'aformat'): + for fmt in ('qfmt', 'afmt'): setattr(cm, fmt, getattr(cm, fmt).replace(t%field.name, t%newName)) field.name = newName - model.setModified() - self.flushMod() + model.flush() - def fieldModelUseCount(self, fieldModel): - "Return the number of cards using fieldModel." + def fieldUseCount(self, field): + "Return the number of cards using field." return self.db.scalar(""" -select count(id) from fields where -fieldModelId = :id and value != "" -""", id=fieldModel.id) +select count(id) from fdata where +fmid = :id and val != "" +""", id=field.id) - def rebuildFieldOrdinals(self, modelId, ids): - """Update field ordinal for all fields given field model IDS. 
-Caller must update model modtime.""" - self.setSchemaModified() - self.db.flush() + def rebuildFieldOrds(self, mid, ids): + self.modSchema() strids = ids2str(ids) - self.db.statement(""" -update fields -set ordinal = (select ordinal from fieldModels where id = fieldModelId) -where fields.fieldModelId in %s""" % strids) + self.db.execute(""" +update fdata +set ord = (select ord from fields where id = fmid) +where fdata.fmid in %s""" % strids) # dirty associated facts - self.db.statement(""" + self.db.execute(""" update facts -set modified = strftime("%s", "now") -where modelId = :id""", id=modelId) - self.flushMod() - - def updateAllFieldChecksums(self): - # zero out - self.db.statement("update fields set chksum = ''") - # add back for unique fields - for m in self.models: - for fm in m.fieldModels: - self.updateFieldChecksums(fm.id) - - def updateFieldChecksums(self, fmid): - self.db.flush() - self.setSchemaModified() - unique = self.db.scalar( - "select \"unique\" from fieldModels where id = :id", id=fmid) - if unique: - l = [] - for (id, value) in self.db.all( - "select id, value from fields where fieldModelId = :id", - id=fmid): - l.append({'id':id, 'chk':fieldChecksum(value)}) - self.db.statements( - "update fields set chksum = :chk where id = :id", l) - else: - self.db.statement( - "update fields set chksum = '' where fieldModelId=:id", - id=fmid) +set mod = strftime("%s", "now") +where mid = :id""", id=mid) # Card models ########################################################################## - def cardModelUseCount(self, cardModel): - "Return the number of cards using cardModel." + def templateUseCount(self, template): + "Return the number of cards using template." return self.db.scalar(""" select count(id) from cards where -cardModelId = :id""", id=cardModel.id) +tid = :id""", id=template.id) - def addCardModel(self, model, cardModel): - self.setSchemaModified() - model.addCardModel(cardModel) + def addCardModel(self, model, template): + self.modSchema() + model.addCardModel(template) - def deleteCardModel(self, model, cardModel): + def deleteCardModel(self, model, template): "Delete all cards that use CARDMODEL from the deck." - self.setSchemaModified() - cards = self.db.column0("select id from cards where cardModelId = :id", - id=cardModel.id) + self.modSchema() + cards = self.db.list("select id from cards where tid = :id", + id=template.id) self.deleteCards(cards) - model.cardModels.remove(cardModel) - model.setModified() - self.flushMod() + model.templates.remove(template) + model.flush() - def updateCardsFromModel(self, model, dirty=True): - "Update all card question/answer when model changes." - ids = self.db.all(""" -select cards.id, cards.cardModelId, cards.factId, facts.modelId from -cards, facts where -cards.factId = facts.id and -facts.modelId = :id""", id=model.id) - if not ids: + def rebuildCardOrds(self, ids): + "Update all card models in IDS. Caller must update model modtime." + self.modSchema() + strids = ids2str(ids) + self.db.execute(""" +update cards set +ord = (select ord from templates where id = tid), +mod = :now +where tid in %s""" % strids, now=time.time()) + + # Caches: q/a, facts.cache and fdata.csum + ########################################################################## + + def updateCache(self, ids, type="card"): + "Update cache after cards, facts or models changed." 
+ # gather metadata + if type == "card": + where = "and c.id in " + ids2str(ids) + elif type == "fact": + where = "and f.id in " + ids2str(ids) + elif type == "model": + where = "and m.id in " + ids2str(ids) + (cids, fids, meta) = self._cacheMeta(where) + if not cids: return - self.updateCardQACache(ids, dirty) + # and fact info + facts = self._cacheFacts(fids) + # generate q/a + pend = [self.formatQA(cids[n], facts[fids[n]], meta[cids[n]]) + for n in range(len(cids))] + # update q/a + self.db.executemany( + "update cards set q = :q, a = :a, mod = %d where id = :id" % + intTime(), pend) + for p in pend: + self.media.registerText(p['q']) + self.media.registerText(p['a']) + # fact value cache + self._updateFieldCache(facts) + # and checksum + self._updateFieldChecksums(facts) - def updateCardsFromFactIds(self, ids, dirty=True): - "Update all card question/answer when model changes." - ids = self.db.all(""" -select cards.id, cards.cardModelId, cards.factId, facts.modelId from -cards, facts where -cards.factId = facts.id and -facts.id in %s""" % ids2str(ids)) - if not ids: - return - self.updateCardQACache(ids, dirty) + def formatQA(self, cardId, fact, meta, filters=True): + "Returns hash of id, question, answer." + d = {'id': cardId} + fields = {} + for (k, v) in fact.items(): + fields["text:"+k] = stripHTML(v[1]) + if v[1]: + fields[k] = '%s' % ( + hexifyID(v[0]), v[1]) + else: + fields[k] = u"" + fields['Tags'] = meta[3] + fields['Model'] = meta[4] + fields['Template'] = meta[5] + fields['Group'] = meta[6] + # render q & a + for (type, format) in (("q", meta[1]), ("a", meta[2])): + if filters: + fields = runFilter("formatQA.pre", fields, meta, self) + html = anki.template.render(format, fields) + if filters: + d[type] = runFilter("formatQA.post", html, fields, meta, self) + d[type] = html + return d - def updateCardQACacheFromIds(self, ids, type="cards"): - "Given a list of card or fact ids, update q/a cache." - if type == "facts": - # convert to card ids - ids = self.db.column0( - "select id from cards where factId in %s" % ids2str(ids)) - rows = self.db.all(""" -select c.id, c.cardModelId, f.id, f.modelId -from cards as c, facts as f -where c.factId = f.id -and c.id in %s""" % ids2str(ids)) - self.updateCardQACache(rows) + def _cacheMeta(self, where=""): + "Return cids, fids, and cid -> data hash." + # data is [fid, qfmt, afmt, tags, model, template, group] + meta = {} + cids = [] + fids = [] + for r in self.db.execute(""" +select c.id, f.id, t.qfmt, t.afmt, f.tags, m.name, t.name, g.name +from cards c, facts f, models m, templates t, groups g where +c.fid == f.id and f.mid == m.id and +c.tid = t.id and c.gid = g.id +%s""" % where): + meta[r[0]] = r[1:] + cids.append(r[0]) + fids.append(r[1]) + return (cids, fids, meta) - def updateCardQACache(self, ids, dirty=True): - "Given a list of (cardId, cardModelId, factId, modId), update q/a cache." - if dirty: - mod = ", modified = %f" % time.time() - else: - mod = "" - # tags - cids = ids2str([x[0] for x in ids]) - tags = dict([(x[0], x[1:]) for x in - self.splitTagsList( - where="and cards.id in %s" % cids)]) + def _cacheFacts(self, ids): + "Return a hash of fid -> (name -> (id, val))." 
facts = {} - # fields - for k, g in groupby(self.db.all(""" -select fields.factId, fieldModels.name, fieldModels.id, fields.value -from fields, fieldModels where fields.factId in %s and -fields.fieldModelId = fieldModels.id -order by fields.factId""" % ids2str([x[2] for x in ids])), - itemgetter(0)): - facts[k] = dict([(r[1], (r[2], r[3])) for r in g]) - # card models - cms = {} - for c in self.db.query(CardModel).all(): - cms[c.id] = c - pend = [formatQA(cid, mid, facts[fid], tags[cid], cms[cmid], self) - for (cid, cmid, fid, mid) in ids] - if pend: - # find existing media references - files = {} - for txt in self.db.column0( - "select question || answer from cards where id in %s" % - cids): - for f in mediaFiles(txt): - if f in files: - files[f] -= 1 - else: - files[f] = -1 - # determine ref count delta - for p in pend: - for type in ("question", "answer"): - txt = p[type] - for f in mediaFiles(txt): - if f in files: - files[f] += 1 - else: - files[f] = 1 - # update references - this could be more efficient - for (f, cnt) in files.items(): - if not cnt: - continue - updateMediaCount(self, f, cnt) - # update q/a - self.db.execute(""" - update cards set - question = :question, answer = :answer - %s - where id = :id""" % mod, pend) - # update fields cache - self.updateFieldCache(facts.keys()) - if dirty: - self.flushMod() + for id, fields in groupby(self.db.all(""" +select fdata.fid, fields.name, fields.id, fdata.val +from fdata, fields where fdata.fid in %s and +fdata.fmid = fields.id +order by fdata.fid""" % ids2str(ids)), itemgetter(0)): + facts[id] = dict([(f[1], f[2:]) for f in fields]) + return facts - def updateFieldCache(self, fids): - "Add stripped HTML cache for sorting/searching." - try: - all = self.db.all( - ("select factId, group_concat(value, ' ') from fields " - "where factId in %s group by factId") % ids2str(fids)) - except: - # older sqlite doesn't support group_concat. this code taken from - # the wm port - all=[] - for factId in fids: - values=self.db.all("select value from fields where value is not NULL and factId=%(factId)i" % {"factId": factId}) - value_list=[] - for row in values: - value_list.append(row[0]) - concatenated_values=' '.join(value_list) - all.append([factId, concatenated_values]) + def _updateFieldCache(self, facts): + "Add stripped HTML cache for searching." r = [] from anki.utils import stripHTMLMedia - for a in all: - r.append({'id':a[0], 'v':stripHTMLMedia(a[1])}) - self.db.statements( - "update facts set cache=:v where id=:id", r) + [r.append((" ".join([x[1] for x in map.values()]), id)) + for (id, map) in facts.items()] + self.db.executemany( + "update facts set cache=? where id=?", r) - def rebuildCardOrdinals(self, ids): - "Update all card models in IDS. Caller must update model modtime." 
- self.setSchemaModified() - self.db.flush() - strids = ids2str(ids) - self.db.statement(""" -update cards set -ordinal = (select ordinal from cardModels where id = cardModelId), -modified = :now -where cardModelId in %s""" % strids, now=time.time()) - self.flushMod() + def _updateFieldChecksums(self, facts): + print "benchmark updatefieldchecksums" + confs = {} + r = [] + for (fid, map) in facts.items(): + for (fmid, val) in map.values(): + if fmid not in confs: + confs[fmid] = simplejson.loads(self.db.scalar( + "select conf from fields where id = ?", + fmid)) + # if unique checking has been turned off, don't bother to + # zero out old values + if confs[fmid]['unique']: + csum = fieldChecksum(val) + r.append((csum, fid, fmid)) + self.db.executemany( + "update fdata set csum=? where fid=? and fmid=?", r) # Tags ########################################################################## def tagList(self): - return self.db.column0("select name from tags order by name") - - def splitTagsList(self, where=""): - return self.db.all(""" -select cards.id, facts.tags, models.name, cardModels.name -from cards, facts, models, cardModels where -cards.factId == facts.id and facts.modelId == models.id -and cards.cardModelId = cardModels.id -%s""" % where) + return self.db.list("select name from tags order by name") def cardsWithNoTags(self): - return self.db.column0(""" + return self.db.list(""" select cards.id from cards, facts where facts.tags = "" -and cards.factId = facts.id""") +and cards.fid = facts.id""") def cardHasTag(self, card, tag): tags = self.db.scalar("select tags from fact where id = :fid", - fid=card.factId) + fid=card.fid) return tag.lower() in parseTags(tags.lower()) - def updateFactTags(self, factIds=None): + def updateFactTags(self, fids=None): "Add any missing tags to the tags list." 
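
_updateFieldChecksums() above only stores a checksum when the field's config has unique checking turned on, which is what later lets duplicate detection run as a single indexed lookup per field instead of a scan over every value. A rough sketch of the idea, assuming an md5-prefix digest (the real fieldChecksum lives in anki.utils and may use a different digest):

    import hashlib

    def field_checksum(value):
        # short, indexable fingerprint of a field value; empty values get
        # an empty checksum so they never register as duplicates
        if not value:
            return ""
        return hashlib.md5(value.encode("utf-8")).hexdigest()[:8]

    # the duplicate check then reduces to one indexed query per field:
    #   select 1 from fdata where csum = ? and fid != ? and val = ?
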
- if factIds: - lim = " where id in " + ids2str(factIds) + if fids: + lim = " where id in " + ids2str(fids) else: lim = "" self.registerTags(set(parseTags( - " ".join(self.db.column0("select distinct tags from facts"+lim))))) + " ".join(self.db.list("select distinct tags from facts"+lim))))) def registerTags(self, tags): r = [] for t in tags: r.append({'t': t}) - self.db.statements(""" -insert or ignore into tags (modified, name) values (%d, :t)""" % intTime(), + self.db.executemany(""" +insert or ignore into tags (mod, name) values (%d, :t)""" % intTime(), r) def addTags(self, ids, tags, add=True): @@ -1142,14 +1060,12 @@ insert or ignore into tags (modified, name) values (%d, :t)""" % intTime(), def fix(row): fids.append(row[0]) return {'id': row[0], 't': fn(tags, row[1])} - self.db.statements(""" -update facts set tags = :t, modified = %d + self.db.executemany(""" +update facts set tags = :t, mod = %d where id = :id""" % intTime(), [fix(row) for row in res]) # update q/a cache - self.updateCardQACacheFromIds(fids, type="facts") - self.flushMod() + self.updateCache(fids, type="fact") self.finishProgress() - self.refreshSession() def deleteTags(self, ids, tags): self.addTags(ids, tags, False) @@ -1175,7 +1091,6 @@ where id = :id""" % intTime(), [fix(row) for row in res]) def startProgress(self, max=0, min=0, title=None): self.enableProgressHandler() runHook("startProgress", max, min, title) - self.db.flush() def updateProgress(self, label=None, value=None): runHook("updateProgress", label, value) @@ -1254,58 +1169,6 @@ where id = :id""" % intTime(), [fix(row) for row in res]) return True return False - # Meta vars - ########################################################################## - - def getInt(self, key, type=int): - ret = self.db.scalar("select value from deckVars where key = :k", - k=key) - if ret is not None: - ret = type(ret) - return ret - - def getFloat(self, key): - return self.getInt(key, float) - - def getBool(self, key): - ret = self.db.scalar("select value from deckVars where key = :k", - k=key) - if ret is not None: - # hack to work around ankidroid bug - if ret.lower() == "true": - return True - elif ret.lower() == "false": - return False - else: - ret = not not int(ret) - return ret - - def getVar(self, key): - "Return value for key as string, or None." - return self.db.scalar("select value from deckVars where key = :k", - k=key) - - def setVar(self, key, value, mod=True): - if self.db.scalar(""" -select value = :value from deckVars -where key = :key""", key=key, value=value): - return - # can't use insert or replace as it confuses the undo code - if self.db.scalar("select 1 from deckVars where key = :key", key=key): - self.db.statement("update deckVars set value=:value where key = :key", - key=key, value=value) - else: - self.db.statement("insert into deckVars (key, value) " - "values (:key, :value)", key=key, value=value) - if mod: - self.setModified() - - def setVarDefault(self, key, value): - if not self.db.scalar( - "select 1 from deckVars where key = :key", key=key): - self.db.statement("insert into deckVars (key, value) " - "values (:key, :value)", key=key, value=value) - # Failed card handling ########################################################################## @@ -1348,208 +1211,32 @@ where key = :key""", key=key, value=value): return 4 return 5 - # Media - ########################################################################## - - def mediaDir(self, create=False): - "Return the media directory if exists. None if couldn't create." 
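
The deleted getVar/setVar accessors were the public face of the old deckVars table; with the move to JSON blobs, per-deck options are deserialized into plain dicts when the deck is opened and dumped back into a text column on save, so adding an option no longer needs a schema change. A small sketch of that round trip (class and key names invented for the example; the deck code uses simplejson, for which the stdlib json module is a drop-in here):

    import json

    class ConfigBlob(object):
        # stand-in for the deck: options live in a dict and round-trip
        # through a single text column
        def __init__(self, raw=None):
            self.config = json.loads(raw) if raw else {}

        def dump(self):
            # the string written back to the deck row on save
            return json.dumps(self.config)

    c = ConfigBlob()
    c.config['nextFactPos'] = 1
    assert json.loads(c.dump())['nextFactPos'] == 1
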
- if self.mediaPrefix: - dir = os.path.join( - self.mediaPrefix, os.path.basename(self.path)) - else: - dir = self.path - dir = re.sub("(?i)\.(anki)$", ".media", dir) - if create == None: - # don't create, but return dir - return dir - if not os.path.exists(dir) and create: - try: - os.makedirs(dir) - except OSError: - # permission denied - return None - if not dir or not os.path.exists(dir): - return None - # change to the current dir - os.chdir(dir) - return dir - - def addMedia(self, path): - """Add PATH to the media directory. -Return new path, relative to media dir.""" - return anki.media.copyToMedia(self, path) - - def renameMediaDir(self, oldPath): - "Copy oldPath to our current media dir. " - assert os.path.exists(oldPath) - newPath = self.mediaDir(create=None) - # copytree doesn't want the dir to exist - try: - shutil.copytree(oldPath, newPath) - except: - # FIXME: should really remove everything in old dir instead of - # giving up - pass - - # DB helpers - ########################################################################## - - def save(self, config=True): - "Commit any pending changes to disk." - if self.lastLoaded == self.modified: - return - self.lastLoaded = self.modified - if config: - self.flushConfig() - self.db.commit() - - def flushConfig(self): - print "make flushConfig() more intelligent" - self._config = unicode(simplejson.dumps(self.config)) - self._qconf = unicode(simplejson.dumps(self.qconf)) - self._data = unicode(simplejson.dumps(self.data)) - - def close(self): - if self.db: - self.db.rollback() - self.db.close() - self.db = None - self.s = None - self.engine.dispose() - runHook("deckClosed") - - def rollback(self): - "Roll back the current transaction and reset session state." - self.db.rollback() - self.db.expunge_all() - self.db.update(self) - self.db.refresh(self) - - def refreshSession(self): - "Flush and expire all items from the session." - self.db.flush() - self.db.expire_all() - - def openSession(self, first=False): - "Open a new session. Assumes old session is already closed." - self.db = SessionHelper(self.Session()) - self.s = self.db - self.db.update(self) - self.refreshSession() - - def closeSession(self): - "Close the current session, saving any changes. Do nothing if no session." - if self.db: - self.save() - try: - self.db.expunge(self) - except: - import sys - sys.stderr.write("ERROR expunging deck..\n") - self.db.close() - self.db = None - self.s = None - - def setModified(self): - #import traceback; traceback.print_stack() - self.modified = intTime() - - def setSchemaModified(self): - self.schemaMod = intTime() - anki.graves.forgetAll(self.db) - - def getFactPos(self): - "Return next fact position, incrementing it." - # note this is incremented even if facts are not added; gaps are not a bug - p = self.config['nextFactPos'] - self.config['nextFactPos'] += 1 - self.setModified() - return p - - def flushMod(self): - "Mark modified and flush to DB." - self.setModified() - self.db.flush() - - def saveAs(self, newPath): - "Returns new deck. Old connection is closed without saving." 
- oldMediaDir = self.mediaDir() - self.flushConfig() - self.db.flush() - # remove new deck if it exists - try: - os.unlink(newPath) - except OSError: - pass - self.startProgress() - # copy tables, avoiding implicit commit on current db - DeckStorage.Deck(newPath, backup=False).close() - new = sqlite.connect(newPath) - for table in self.db.column0( - "select name from sqlite_master where type = 'table'"): - if table.startswith("sqlite_"): - continue - new.execute("delete from %s" % table) - cols = [str(x[1]) for x in new.execute( - "pragma table_info('%s')" % table).fetchall()] - q = "select 'insert into %(table)s values(" - q += ",".join(["'||quote(\"" + col + "\")||'" for col in cols]) - q += ")' from %(table)s" - q = q % {'table': table} - c = 0 - for row in self.db.execute(q): - new.execute(row[0]) - if c % 1000: - self.updateProgress() - c += 1 - # save new, close both - new.commit() - new.close() - self.close() - # open again in orm - newDeck = DeckStorage.Deck(newPath, backup=False) - # move media - if oldMediaDir: - newDeck.renameMediaDir(oldMediaDir) - # forget sync name - newDeck.syncName = u"" - newDeck.db.commit() - # and return the new deck - self.finishProgress() - return newDeck - # Syncing ########################################################################## - # toggling does not bump deck mod time, since it may happen on upgrade, - # and the variable is not synced def enableSyncing(self): - self.syncName = unicode(checksum(self.path.encode("utf-8"))) - self.db.commit() + self.syncName = self.getSyncName() def disableSyncing(self): self.syncName = u"" - self.db.commit() def syncingEnabled(self): return self.syncName - def checkSyncHash(self): - if self.syncName and self.syncName != checksum(self.path.encode("utf-8")): - self.notify(_("""\ -Because '%s' has been moved or copied, automatic synchronisation \ -has been disabled (ERR-0100). + def genSyncName(self): + return unicode(checksum(self.path.encode("utf-8"))) -You can disable this check in Settings>Preferences>Network.""") % self.name()) + def syncHashBad(self): + if self.syncName and self.syncName != self.genSyncName(): self.disableSyncing() - self.syncName = u"" + return True # DB maintenance ########################################################################## def recoverCards(self, ids): "Put cards with damaged facts into new facts." - # create a new model in case the user has modified a previous one + # create a new model in case the user has mod a previous one from anki.stdmodels import RecoveryModel m = RecoveryModel() last = self.currentModel @@ -1574,17 +1261,17 @@ where cardId = :cid and ct.tagId = t.id""", cid=id) or u"" raise Exception("Your sqlite is too old.") cards = self.addFact(f) # delete the freshly created card and point old card to this fact - self.db.statement("delete from cards where id = :id", + self.db.execute("delete from cards where id = :id", id=f.cards[0].id) - self.db.statement(""" -update cards set factId = :fid, cardModelId = :cmid, ordinal = 0 -where id = :id""", fid=f.id, cmid=m.cardModels[0].id, id=id) + self.db.execute(""" +update cards set fid = :fid, tid = :cmid, ord = 0 +where id = :id""", fid=f.id, cmid=m.templates[0].id, id=id) # restore old model self.currentModel = last def fixIntegrity(self, quick=False): - "Fix some problems and rebuild caches. Caller must .reset()" - self.db.commit() + "Fix possible problems and rebuild caches." 
+ self.save() self.resetUndo() problems = [] recover = False @@ -1613,29 +1300,29 @@ select decks.id from decks, models where decks.currentModelId = models.id"""): self.currentModelId = self.models[0].id problems.append(_("The current model didn't exist")) - # fields missing a field model - ids = self.db.column0(""" -select id from fields where fieldModelId not in ( -select distinct id from fieldModels)""") + # fdata missing a field model + ids = self.db.list(""" +select id from fdata where fmid not in ( +select distinct id from fields)""") if ids: - self.db.statement("delete from fields where id in %s" % + self.db.execute("delete from fdata where id in %s" % ids2str(ids)) problems.append(ngettext("Deleted %d field with missing field model", - "Deleted %d fields with missing field model", len(ids)) % + "Deleted %d fdata with missing field model", len(ids)) % len(ids)) # facts missing a field? - ids = self.db.column0(""" -select distinct facts.id from facts, fieldModels where -facts.modelId = fieldModels.modelId and fieldModels.id not in -(select fieldModelId from fields where factId = facts.id)""") + ids = self.db.list(""" +select distinct facts.id from facts, fields where +facts.mid = fields.mid and fields.id not in +(select fmid from fdata where fid = facts.id)""") if ids: self.deleteFacts(ids) problems.append(ngettext("Deleted %d fact with missing fields", "Deleted %d facts with missing fields", len(ids)) % len(ids)) # cards missing a fact? - ids = self.db.column0(""" -select id from cards where factId not in (select id from facts)""") + ids = self.db.list(""" +select id from cards where fid not in (select id from facts)""") if ids: recover = True self.recoverCards(ids) @@ -1643,9 +1330,9 @@ select id from cards where factId not in (select id from facts)""") "Recovered %d cards with missing fact", len(ids)) % len(ids)) # cards missing a card model? - ids = self.db.column0(""" -select id from cards where cardModelId not in -(select id from cardModels)""") + ids = self.db.list(""" +select id from cards where tid not in +(select id from templates)""") if ids: recover = True self.recoverCards(ids) @@ -1653,10 +1340,10 @@ select id from cards where cardModelId not in "Recovered %d cards with no card template", len(ids)) % len(ids)) # cards with a card model from the wrong model - ids = self.db.column0(""" -select id from cards where cardModelId not in (select cm.id from -cardModels cm, facts f where cm.modelId = f.modelId and -f.id = cards.factId)""") + ids = self.db.list(""" +select id from cards where tid not in (select cm.id from +templates cm, facts f where cm.mid = f.mid and +f.id = cards.fid)""") if ids: recover = True self.recoverCards(ids) @@ -1670,44 +1357,43 @@ f.id = cards.factId)""") "Deleted %d facts with no cards", len(ids)) % len(ids)) # dangling fields? 
- ids = self.db.column0(""" -select id from fields where factId not in (select id from facts)""") + ids = self.db.list(""" +select id from fdata where fid not in (select id from facts)""") if ids: - self.db.statement( - "delete from fields where id in %s" % ids2str(ids)) + self.db.execute( + "delete from fdata where id in %s" % ids2str(ids)) problems.append(ngettext("Deleted %d dangling field", "Deleted %d dangling fields", len(ids)) % len(ids)) - self.db.flush() if not quick: self.updateProgress() # these sometimes end up null on upgrade - self.db.statement("update models set source = 0 where source is null") - self.db.statement( - "update cardModels set allowEmptyAnswer = 1, typeAnswer = '' " + self.db.execute("update models set source = 0 where source is null") + self.db.execute( + "update templates set allowEmptyAnswer = 1, typeAnswer = '' " "where allowEmptyAnswer is null or typeAnswer is null") # fix tags self.updateProgress() - self.db.statement("delete from tags") + self.db.execute("delete from tags") self.updateFactTags() print "should ensure tags having leading/trailing space" - # make sure ordinals are correct + # make sure ords are correct self.updateProgress() - self.db.statement(""" -update fields set ordinal = (select ordinal from fieldModels -where id = fieldModelId)""") - self.db.statement(""" -update cards set ordinal = (select ordinal from cardModels -where cards.cardModelId = cardModels.id)""") + self.db.execute(""" +update fdata set ord = (select ord from fields +where id = fmid)""") + self.db.execute(""" +update cards set ord = (select ord from templates +where cards.tid = templates.id)""") # fix problems with stripping html self.updateProgress() - fields = self.db.all("select id, value from fields") - newFields = [] - for (id, value) in fields: - newFields.append({'id': id, 'value': tidyHTML(value)}) - self.db.statements( - "update fields set value=:value where id=:id", - newFields) + fdata = self.db.all("select id, val from fdata") + newFdata = [] + for (id, val) in fdata: + newFdata.append({'id': id, 'val': tidyHTML(val)}) + self.db.executemany( + "update fdata set val=:val where id=:id", + newFdata) # and field checksums self.updateProgress() self.updateAllFieldChecksums() @@ -1718,7 +1404,7 @@ where cards.cardModelId = cardModels.id)""") self.updateProgress() self.rebuildTypes() # force a full sync - self.setSchemaModified() + self.modSchema() # and finally, optimize self.updateProgress() self.optimize() @@ -1728,11 +1414,7 @@ where cards.cardModelId = cardModels.id)""") if save > 0: txt += "\n" + _("Saved %dKB.") % save problems.append(txt) - # update deck and save - if not quick: - self.flushMod() - self.save() - self.refreshSession() + self.save() self.finishProgress() if problems: if recover: @@ -1744,9 +1426,8 @@ original layout of the facts has been lost.""")) return "ok" def optimize(self): - self.db.commit() - self.db.statement("vacuum") - self.db.statement("analyze") + self.db.execute("vacuum") + self.db.execute("analyze") # Undo/redo ########################################################################## @@ -1756,9 +1437,9 @@ original layout of the facts has been lost.""")) self.undoStack = [] self.redoStack = [] self.undoEnabled = True - self.db.statement( + self.db.execute( "create temporary table undoLog (seq integer primary key not null, sql text)") - tables = self.db.column0( + tables = self.db.list( "select name from sqlite_master where type = 'table'") for table in tables: if table in ("undoLog", "sqlite_stat1"): @@ -1766,7 +1447,7 @@ original 
layout of the facts has been lost.""")) columns = [r[1] for r in self.db.all("pragma table_info(%s)" % table)] # insert - self.db.statement(""" + self.db.execute(""" create temp trigger _undo_%(t)s_it after insert on %(t)s begin insert into undoLog values @@ -1784,7 +1465,7 @@ insert into undoLog values (null, 'update %(t)s """ % {'t': table} 's': sep, 'c': c} sep = "," sql += " where rowid = ' || old.rowid); end" - self.db.statement(sql) + self.db.execute(sql) # delete sql = """ create temp trigger _undo_%(t)s_dt @@ -1799,7 +1480,7 @@ insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table} continue sql += ",' || quote(old.%s) ||'" % c sql += ")'); end" - self.db.statement(sql) + self.db.execute(sql) def undoName(self): for n in reversed(self.undoStack): @@ -1821,7 +1502,7 @@ insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table} def resetUndo(self): try: - self.db.statement("delete from undoLog") + self.db.execute("delete from undoLog") except: pass self.undoStack = [] @@ -1834,7 +1515,6 @@ insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table} def setUndoStart(self, name, merge=False): if not self.undoEnabled: return - self.db.flush() if merge and self.undoStack: if self.undoStack[-1] and self.undoStack[-1][0] == name: # merge with last entry? @@ -1845,7 +1525,6 @@ insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table} def setUndoEnd(self, name): if not self.undoEnabled: return - self.db.flush() end = self._latestUndoRow() while self.undoStack[-1] is None: # strip off barrier @@ -1861,7 +1540,6 @@ insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table} return self.db.scalar("select max(rowid) from undoLog") or 0 def _undoredo(self, src, dst): - self.db.flush() while 1: u = src.pop() if u: @@ -1869,7 +1547,7 @@ insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table} (start, end) = (u[1], u[2]) if end is None: end = self._latestUndoRow() - sql = self.db.column0(""" + sql = self.db.list(""" select sql from undoLog where seq > :s and seq <= :e order by seq desc""", s=start, e=end) mod = len(sql) / 35 @@ -1887,15 +1565,13 @@ seq > :s and seq <= :e order by seq desc""", s=start, e=end) self.finishProgress() def undo(self): - "Undo the last action(s). Caller must .reset()" + "Undo the last action(s)." self._undoredo(self.undoStack, self.redoStack) - self.refreshSession() runHook("postUndoRedo") def redo(self): - "Redo the last action(s). Caller must .reset()" + "Redo the last action(s)." 
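
The undo machinery is pure SQL: each table gets temporary triggers that append the inverse statement to undoLog, and _undoredo() replays a slice of that log newest-first. A toy reproduction of the insert case, runnable with the stdlib sqlite3 module (the notes table is made up for the example):

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("create table notes (id integer primary key, txt text)")
    db.execute("create temporary table undoLog "
               "(seq integer primary key not null, sql text)")
    # log the inverse statement whenever a row is inserted, as the deck's
    # triggers do for every table
    db.execute("""
    create temp trigger _undo_notes_it after insert on notes begin
    insert into undoLog values
    (null, 'delete from notes where rowid = ' || new.rowid); end""")
    db.execute("insert into notes (txt) values ('hello')")
    # undo = replay the logged statements in reverse order
    for (sql,) in db.execute("select sql from undoLog order by seq desc").fetchall():
        db.execute(sql)
    assert db.execute("select count() from notes").fetchone()[0] == 0
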
self._undoredo(self.redoStack, self.undoStack) - self.refreshSession() runHook("postUndoRedo") # Dynamic indices @@ -1904,8 +1580,8 @@ seq > :s and seq <= :e order by seq desc""", s=start, e=end) def updateDynamicIndices(self): # determine required columns required = [] - if self.qconf['newTodayOrder'] == NEW_TODAY_ORDINAL: - required.append("ordinal") + if self.qconf['newTodayOrder'] == NEW_TODAY_ORD: + required.append("ord") if self.qconf['revCardOrder'] in (REV_CARDS_OLD_FIRST, REV_CARDS_NEW_FIRST): required.append("interval") cols = ["queue", "due", "groupId"] + required @@ -1916,195 +1592,20 @@ seq > :s and seq <= :e order by seq desc""", s=start, e=end) else: rows = None if not (rows and cols == [r[2] for r in rows]): - self.db.statement("drop index if exists ix_cards_multi") - self.db.statement("create index ix_cards_multi on cards (%s)" % + self.db.execute("drop index if exists ix_cards_multi") + self.db.execute("create index ix_cards_multi on cards (%s)" % ", ".join(cols)) - self.db.statement("analyze") - -mapper(Deck, deckTable, properties={ - '_qconf': deckTable.c.qconf, - '_config': deckTable.c.config, - '_data': deckTable.c.data, -}) + self.db.execute("analyze") # Shared decks ########################################################################## -sourcesTable = Table( - 'sources', metadata, - Column('id', Integer, nullable=False, primary_key=True), - Column('name', UnicodeText, nullable=False, default=""), - Column('created', Integer, nullable=False, default=intTime), - Column('lastSync', Integer, nullable=False, default=0), - # -1 = never check, 0 = always check, 1+ = number of seconds passed. - # not currently exposed in the GUI - Column('syncPeriod', Integer, nullable=False, default=0)) - -# Labels -########################################################################## - -def newCardOrderLabels(): - return { - 0: _("Add new cards in random order"), - 1: _("Add new cards to end of queue"), - } - -def newCardSchedulingLabels(): - return { - 0: _("Spread new cards out through reviews"), - 1: _("Show new cards after all other cards"), - 2: _("Show new cards before reviews"), - } - -# FIXME: order due is not very useful anymore -def revCardOrderLabels(): - return { - 0: _("Review cards from largest interval"), - 1: _("Review cards from smallest interval"), - 2: _("Review cards in order due"), - 3: _("Review cards in random order"), - } - -def failedCardOptionLabels(): - return { - 0: _("Show failed cards soon"), - 1: _("Show failed cards at end"), - 2: _("Show failed cards in 10 minutes"), - 3: _("Show failed cards in 8 hours"), - 4: _("Show failed cards in 3 days"), - 5: _("Custom failed cards handling"), - } - -# Deck storage -########################################################################## - -class DeckStorage(object): - - def _getDeck(path, create, pool): - engine = None - try: - (engine, session) = DeckStorage._attach(path, create, pool) - s = session() - if create: - DeckStorage._addTables(engine) - metadata.create_all(engine) - DeckStorage._addConfig(engine) - deck = DeckStorage._init(s) - updateIndices(engine) - engine.execute("analyze") - else: - ver = upgradeSchema(engine, s) - # add any possibly new tables if we're upgrading - if ver < DECK_VERSION: - DeckStorage._addTables(engine) - metadata.create_all(engine) - deck = s.query(Deck).get(1) - if not deck: - raise DeckAccessError(_("Deck missing core table"), - type="nocore") - # attach db vars - deck.path = path - deck.Session = session - deck.engine = engine - # db is new style; s old style - 
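
updateDynamicIndices() only rebuilds ix_cards_multi when the required column list has actually changed, by comparing it against what pragma index_info reports for the existing index. The same check in isolation (the table and column set are trimmed stand-ins for the real schema):

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("create table cards (queue, due, groupId, ord, interval)")

    def ensure_index(db, cols):
        # pragma index_info yields (seqno, cid, name) for each indexed column
        rows = db.execute("pragma index_info('ix_cards_multi')").fetchall()
        if cols != [r[2] for r in rows]:
            db.execute("drop index if exists ix_cards_multi")
            db.execute("create index ix_cards_multi on cards (%s)" % ", ".join(cols))
            db.execute("analyze")

    ensure_index(db, ["queue", "due", "groupId"])
    ensure_index(db, ["queue", "due", "groupId", "ord"])  # recreated with the extra column
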
deck.db = SessionHelper(s) - deck.s = deck.db - deck._initVars() - if not create: - upgradeDeck(deck) - return deck - except OperationalError, e: - if engine: - engine.dispose() - if (str(e.orig).startswith("database table is locked") or - str(e.orig).startswith("database is locked")): - raise DeckAccessError(_("File is in use by another process"), - type="inuse") - else: - raise e - _getDeck = staticmethod(_getDeck) - - def _attach(path, create, pool=True): - "Attach to a file, maybe initializing DB" - path = "sqlite:///" + path.encode("utf-8") - if pool: - # open and lock connection for single use - engine = create_engine(path, connect_args={'timeout': 0}) - else: - # no pool & concurrent access w/ timeout - engine = create_engine( - path, poolclass=NullPool, connect_args={'timeout': 60}) - session = sessionmaker(bind=engine, autoflush=False, autocommit=True) - if create: - engine.execute("pragma page_size = 4096") - engine.execute("pragma legacy_file_format = 0") - engine.execute("vacuum") - engine.execute("pragma cache_size = 20000") - return (engine, session) - _attach = staticmethod(_attach) - - def _init(s): - "Add a new deck to the database. Return saved deck." - deck = Deck() - if sqlalchemy.__version__.startswith("0.4."): - s.save(deck) - else: - s.add(deck) - s.flush() - return deck - _init = staticmethod(_init) - - def _addConfig(s): - "Add a default group & config." - s.execute(""" -insert into groupConfig values (1, :t, :name, :conf)""", - t=intTime(), name=_("Default Config"), - conf=simplejson.dumps(anki.groups.defaultConf)) - s.execute(""" -insert into groups values (1, :t, "Default", 1)""", - t=intTime()) - _addConfig = staticmethod(_addConfig) - - def _addTables(s): - "Add tables with syntax that older sqlalchemy versions don't support." - sql = [ - """ -create table tags ( -id integer not null, -modified integer not null, -name text not null collate nocase unique, -primary key(id))""", - """ -create table groups ( -id integer primary key autoincrement, -modified integer not null, -name text not null collate nocase unique, -confId integer not null)""" - ] - for table in sql: - try: - s.execute(table) - except: - pass - - _addTables = staticmethod(_addTables) - - def Deck(path, backup=True, pool=True, minimal=False): - "Create a new deck or attach to an existing one. Path should be unicode." - path = os.path.abspath(path) - create = not os.path.exists(path) - deck = DeckStorage._getDeck(path, create, pool) - oldMod = deck.modified - deck.qconf = simplejson.loads(deck._qconf) - deck.config = simplejson.loads(deck._config) - deck.data = simplejson.loads(deck._data) - if minimal: - return deck - # check if deck has been moved, and disable syncing - deck.checkSyncHash() - # rebuild queue - deck.reset() - # make sure we haven't accidentally bumped the modification time - assert deck.modified == oldMod - return deck - Deck = staticmethod(Deck) +# sourcesTable = Table( +# 'sources', metadata, +# Column('id', Integer, nullable=False, primary_key=True), +# Column('name', UnicodeText, nullable=False, default=""), +# Column('created', Integer, nullable=False, default=intTime), +# Column('lastSync', Integer, nullable=False, default=0), +# # -1 = never check, 0 = always check, 1+ = number of seconds passed. 
+# # not currently exposed in the GUI +# Column('syncPeriod', Integer, nullable=False, default=0)) diff --git a/anki/errors.py b/anki/errors.py index 80f24aacc..fdaca834d 100644 --- a/anki/errors.py +++ b/anki/errors.py @@ -2,42 +2,12 @@ # Copyright: Damien Elmes # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html -class Error(Exception): - def __init__(self, message="", **data): +class AnkiError(Exception): + def __init__(self, type, **data): + self.type = type self.data = data - self._message = message def __str__(self): - m = self._message + m = self.type if self.data: m += ": %s" % repr(self.data) return m - -class DeckAccessError(Error): - pass - -class ImportFileError(Error): - "Unable to load file to import from." - pass - -class ImportFormatError(Error): - "Unable to determine pattern in text file." - pass - -class ImportEncodingError(Error): - "The file was not in utf-8." - pass - -class ExportFileError(Error): - "Unable to save file." - pass - -class SyncError(Error): - "A problem occurred during syncing." - pass - -# facts, models -class FactInvalidError(Error): - """A fact was invalid/not unique according to the model. -'field' defines the problem field. -'type' defines the type of error ('fieldEmpty', 'fieldNotUnique')""" - pass diff --git a/anki/exporting.py b/anki/exporting.py index 0a9b87a36..fe472eab2 100644 --- a/anki/exporting.py +++ b/anki/exporting.py @@ -8,9 +8,8 @@ from anki import DeckStorage from anki.cards import Card from anki.sync import SyncClient, SyncServer, copyLocalMedia from anki.lang import _ -from anki.utils import findTag, parseTags, stripHTML, ids2str +from anki.utils import parseTags, stripHTML, ids2str from anki.tags import tagIds -from anki.db import * class Exporter(object): def __init__(self, deck): diff --git a/anki/facts.py b/anki/facts.py index a04b93577..1a5c9a964 100644 --- a/anki/facts.py +++ b/anki/facts.py @@ -3,166 +3,126 @@ # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html import time -from anki.db import * -from anki.errors import * -from anki.models import Model, FieldModel, fieldModelsTable +from anki.errors import AnkiError from anki.utils import genID, stripHTMLMedia, fieldChecksum, intTime, \ addTags, deleteTags, parseTags -from anki.hooks import runHook - -# Fields in a fact -########################################################################## - -fieldsTable = Table( - 'fields', metadata, - Column('id', Integer, primary_key=True), - Column('factId', Integer, ForeignKey("facts.id"), nullable=False), - Column('fieldModelId', Integer, ForeignKey("fieldModels.id"), - nullable=False), - Column('ordinal', Integer, nullable=False), - Column('value', UnicodeText, nullable=False), - Column('chksum', String, nullable=False, default="")) - -class Field(object): - "A field in a fact." - - def __init__(self, fieldModel=None): - if fieldModel: - self.fieldModel = fieldModel - self.ordinal = fieldModel.ordinal - self.value = u"" - self.id = genID() - - def getName(self): - return self.fieldModel.name - name = property(getName) - -mapper(Field, fieldsTable, properties={ - 'fieldModel': relation(FieldModel) - }) - -# Facts: a set of fields and a model -########################################################################## - -# Pos: incrementing number defining add order. There may be duplicates if -# content is added on two sync locations at once. Importing adds to end. 
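
The error hierarchy collapses into a single AnkiError carrying a type string plus arbitrary keyword data, so callers branch on err.type rather than importing a family of subclasses. Usage would look roughly like this (the "fieldNotUnique" type is borrowed from the old FactInvalidError docstring; __str__ is trimmed from the class for brevity):

    class AnkiError(Exception):
        def __init__(self, type, **data):
            self.type = type
            self.data = data

    try:
        raise AnkiError("fieldNotUnique", field="Front")
    except AnkiError as e:
        if e.type == "fieldNotUnique":
            pass  # e.g. warn about the duplicate field named in e.data['field']
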
-# Cache: a HTML-stripped amalgam of the field contents, so we can perform -# searches of marked up text in a reasonable time. - -factsTable = Table( - 'facts', metadata, - Column('id', Integer, primary_key=True), - Column('modelId', Integer, ForeignKey("models.id"), nullable=False), - Column('pos', Integer, nullable=False), - Column('modified', Integer, nullable=False, default=intTime), - Column('tags', UnicodeText, nullable=False, default=u""), - Column('cache', UnicodeText, nullable=False, default=u"")) class Fact(object): - "A single fact. Fields exposed as dict interface." - def __init__(self, model=None, pos=None): - self.model = model - self.id = genID() - self._tags = u"" - if model: - # creating - for fm in model.fieldModels: - self.fields.append(Field(fm)) - self.pos = pos - self.new = True + def __init__(self, deck, model=None, id=None): + assert not (model and id) + self.deck = deck + if id: + self.id = id + self.load() + else: + self.id = genID() + self.model = model + self.mid = model.id + self.mod = intTime() + self.tags = "" + self.cache = "" + self._fields = [""] * len(self.model.fields) + self._fmap = self.model.fieldMap() - def isNew(self): - return getattr(self, 'new', False) + def load(self): + (self.mid, + self.mod, + self.pos, + self.tags) = self.deck.db.first(""" +select mid, mod, pos, tags from facts where id = ?""", self.id) + self._fields = self.deck.db.list(""" +select value from fdata where fid = ? order by ordinal""", self.id) + self.model = self.deck.getModel(self.mid) + + def flush(self): + self.mod = intTime() + # facts table + self.cache = stripHTMLMedia(u" ".join(self._fields)) + self.deck.db.execute(""" +insert or replace into facts values (?, ?, ?, ?, ?, ?)""", + self.id, self.mid, self.mod, + self.pos, self.tags, self.cache) + # fdata table + self.deck.db.execute("delete from fdata where fid = ?", self.id) + d = [] + for (fmid, ord, conf) in self._fmap.values(): + val = self._fields[ord] + d.append(dict(fid=self.id, fmid=fmid, ord=ord, + val=val)) + self.deck.db.executemany(""" +insert into fdata values (:fid, :fmid, :ord, :val, '')""", d) + # media and caches + self.deck.updateCache([self.id], "fact") + + def cards(self): + return [self.deck.getCard(id) for id in self.deck.db.list( + "select id from cards where fid = ? 
order by ord", self.id)] + + # Dict interface + ################################################## def keys(self): - return [field.name for field in self.fields] + return self._fmap.keys() def values(self): - return [field.value for field in self.fields] + return self._fields - def __getitem__(self, key): + def items(self): + return [(k, self._fields[v]) + for (k, v) in self._fmap.items()] + + def _fieldOrd(self, key): try: - return [f.value for f in self.fields if f.name == key][0] - except IndexError: + return self._fmap[key][1] + except: raise KeyError(key) - def __setitem__(self, key, value): - try: - item = [f for f in self.fields if f.name == key][0] - except IndexError: - raise KeyError - item.value = value - if item.fieldModel.unique: - item.chksum = fieldChecksum(value) - else: - item.chksum = "" + def __getitem__(self, key): + return self._fields[self._fieldOrd(key)] - def get(self, key, default): - try: - return self[key] - except (IndexError, KeyError): - return default + def __setitem__(self, key, value): + self._fields[self._fieldOrd(key)] = value + + def fieldsWithIds(self): + return dict( + [(k, (v[0], self[k])) for (k,v) in self._fmap.items()]) + + # Tags + ################################################## def addTags(self, tags): - self._tags = addTags(tags, self._tags) + self.tags = addTags(tags, self.tags) def deleteTags(self, tags): - self._tags = deleteTags(tags, self._tags) + self.tags = deleteTags(tags, self.tags) - def tags(self): - return parseTags(self._tags) + # Unique/duplicate checks + ################################################## - def assertValid(self): - "Raise an error if required fields are empty." - for field in self.fields: - if not self.fieldValid(field): - raise FactInvalidError(type="fieldEmpty", - field=field.name) - - def fieldValid(self, field): - return not (field.fieldModel.required and not field.value.strip()) - - def assertUnique(self, s): - "Raise an error if duplicate fields are found." - for field in self.fields: - if not self.fieldUnique(field, s): - raise FactInvalidError(type="fieldNotUnique", - field=field.name) - - def fieldUnique(self, field, s): - if not field.fieldModel.unique: + def fieldUnique(self, name): + (fmid, ord, conf) = self._fmap[name] + if not conf['unique']: return True - req = ("select value from fields " - "where fieldModelId = :fmid and value = :val and chksum = :chk") - if field.id: - req += " and id != %s" % field.id - return not s.scalar(req, val=field.value, fmid=field.fieldModel.id, - chk=fieldChecksum(field.value)) + val = self[name] + csum = fieldChecksum(val) + return not self.deck.db.scalar( + "select 1 from fdata where csum = ? and fid != ? and val = ?", + csum, self.id, val) - def focusLost(self, field): - runHook('fact.focusLost', self, field) + def fieldComplete(self, name, text=None): + (fmid, ord, conf) = self._fmap[name] + if not conf['required']: + return True + return self[name] - def setModified(self, textChanged=False, deck=None, media=True): - "Mark modified and update cards." 
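
The new Fact keeps its values in a list ordered by field ordinal and resolves names through the model's field map, which is what makes the dict-style access above a couple of list lookups. A stripped-down sketch of just that mapping (field names invented; the real _fmap values are richer tuples of id, ordinal and config):

    class MiniFact(object):
        def __init__(self, fieldNames):
            # name -> ordinal, standing in for model.fieldMap()
            self._fmap = dict((n, i) for i, n in enumerate(fieldNames))
            self._fields = [u""] * len(fieldNames)

        def __getitem__(self, key):
            return self._fields[self._fmap[key]]

        def __setitem__(self, key, value):
            self._fields[self._fmap[key]] = value

        def items(self):
            return [(k, self._fields[v]) for (k, v) in self._fmap.items()]

    f = MiniFact([u"Front", u"Back"])
    f[u"Front"] = u"bonjour"
    assert (u"Front", u"bonjour") in f.items()
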
- self.modified = intTime() - if textChanged: - if not deck: - # FIXME: compat code - import ankiqt - if not getattr(ankiqt, 'setModWarningShown', None): - import sys; sys.stderr.write( - "plugin needs to pass deck to fact.setModified()") - ankiqt.setModWarningShown = True - deck = ankiqt.mw.deck - assert deck - self.cache = stripHTMLMedia(u" ".join( - self.values())) - for card in self.cards: - card.rebuildQA(deck) - -mapper(Fact, factsTable, properties={ - 'model': relation(Model), - 'fields': relation(Field, backref="fact", order_by=Field.ordinal), - '_tags': factsTable.c.tags - }) + def problems(self): + d = [] + for k in self._fmap.keys(): + if not self.fieldUnique(k): + d.append("unique") + elif not self.fieldComplete(k): + d.append("required") + else: + d.append(None) + return d diff --git a/anki/find.py b/anki/find.py index 45a1f7e89..61f52dfa9 100644 --- a/anki/find.py +++ b/anki/find.py @@ -24,7 +24,7 @@ SEARCH_PHRASE_WB = 9 def findCards(deck, query): (q, cmquery, showdistinct, filters, args) = findCardsWhere(deck, query) - (factIdList, cardIdList) = findCardsMatchingFilters(deck, filters) + (fidList, cardIdList) = findCardsMatchingFilters(deck, filters) query = "select id from cards" hasWhere = False if q: @@ -36,18 +36,18 @@ def findCards(deck, query): hasWhere = True else: query += " and " if cmquery['pos']: - query += (" factId in(select distinct factId from cards "+ + query += (" fid in(select distinct fid from cards "+ "where id in (" + cmquery['pos'] + ")) ") query += " and id in(" + cmquery['pos'] + ") " if cmquery['neg']: - query += (" factId not in(select distinct factId from "+ + query += (" fid not in(select distinct fid from "+ "cards where id in (" + cmquery['neg'] + ")) ") - if factIdList is not None: + if fidList is not None: if hasWhere is False: query += " where " hasWhere = True else: query += " and " - query += " factId IN %s" % ids2str(factIdList) + query += " fid IN %s" % ids2str(fidList) if cardIdList is not None: if hasWhere is False: query += " where " @@ -55,9 +55,9 @@ def findCards(deck, query): else: query += " and " query += " id IN %s" % ids2str(cardIdList) if showdistinct: - query += " group by factId" + query += " group by fid" #print query, args - return deck.db.column0(query, **args) + return deck.db.list(query, **args) def findCardsWhere(deck, query): (tquery, fquery, qquery, fidquery, cmquery, sfquery, qaquery, @@ -65,15 +65,15 @@ def findCardsWhere(deck, query): q = "" x = [] if tquery: - x.append(" factId in (%s)" % tquery) + x.append(" fid in (%s)" % tquery) if fquery: - x.append(" factId in (%s)" % fquery) + x.append(" fid in (%s)" % fquery) if qquery: x.append(" id in (%s)" % qquery) if fidquery: x.append(" id in (%s)" % fidquery) if sfquery: - x.append(" factId in (%s)" % sfquery) + x.append(" fid in (%s)" % sfquery) if qaquery: x.append(" id in (%s)" % qaquery) if x: @@ -83,7 +83,7 @@ def findCardsWhere(deck, query): def allFMFields(deck, tolower=False): fields = [] try: - fields = deck.db.column0( + fields = deck.db.list( "select distinct name from fieldmodels order by name") except: fields = [] @@ -269,17 +269,17 @@ def findCardsMatchingFilters(deck, filters): if fquery: if filter['is_neg']: fquery += " except " else: fquery += " intersect " - elif filter['is_neg']: fquery += "select id from fields except " + elif filter['is_neg']: fquery += "select id from fdata except " value = filter['value'].replace("*", "%") args["_ff_%d" % c] = "%"+value+"%" fquery += ( - "select id from fields where value like "+ + "select id from fdata where 
value like "+ ":_ff_%d escape '\\'" % c) rows = deck.db.execute( - 'select factId, value from fields where id in (' + + 'select fid, value from fdata where id in (' + fquery + ')', args) while (1): row = rows.fetchone() @@ -300,21 +300,21 @@ def findCardsMatchingFilters(deck, filters): if sfquery: if filter['is_neg']: sfquery += " except " else: sfquery += " intersect " - elif filter['is_neg']: sfquery += "select id from fields except " + elif filter['is_neg']: sfquery += "select id from fdata except " field = field.replace("*", "%") value = filter['value'].replace("*", "%") args["_ff_%d" % c] = "%"+value+"%" - ids = deck.db.column0( + ids = deck.db.list( "select id from fieldmodels where name like "+ ":field escape '\\'", field=field) - sfquery += ("select id from fields where "+ - "fieldModelId in %s and value like "+ + sfquery += ("select id from fdata where "+ + "fmid in %s and value like "+ ":_ff_%d escape '\\'") % (ids2str(ids), c) rows = deck.db.execute( - 'select f.factId, f.value, fm.name from fields as f '+ - 'left join fieldmodels as fm ON (f.fieldModelId = '+ + 'select f.fid, f.value, fm.name from fdata as f '+ + 'left join fieldmodels as fm ON (f.fmid = '+ 'fm.id) where f.id in (' + sfquery + ')', args) while (1): row = rows.fetchone() @@ -364,18 +364,18 @@ def findCardsMatchingFilters(deck, filters): (filter['is_neg'] is True and res is None)): cardFilterMatches.append(row[0]) - factIds = None + fids = None if len(factFilters) > 0 or len(fieldFilters) > 0: - factIds = [] - factIds.extend(factFilterMatches) - factIds.extend(fieldFilterMatches) + fids = [] + fids.extend(factFilterMatches) + fids.extend(fieldFilterMatches) cardIds = None if len(cardFilters) > 0: cardIds = [] cardIds.extend(cardFilterMatches) - return (factIds, cardIds) + return (fids, cardIds) def _findCards(deck, query): "Find facts matching QUERY." 
@@ -400,7 +400,7 @@ def _findCards(deck, query): tquery += "select id from facts except " if token == "none": tquery += """ -select cards.id from cards, facts where facts.tags = '' and cards.factId = facts.id """ +select cards.id from cards, facts where facts.tags = '' and cards.fid = facts.id """ else: token = token.replace("*", "%") if not token.startswith("%"): @@ -450,11 +450,11 @@ select id from facts where tags like :_tag_%d""" % c fidquery += " intersect " elif isNeg: fidquery += "select id from cards except " - fidquery += "select id from cards where factId in (%s)" % token + fidquery += "select id from cards where fid in (%s)" % token elif type == SEARCH_CARD: print "search_card broken" token = token.replace("*", "%") - ids = deck.db.column0(""" + ids = deck.db.list(""" select id from tags where name like :tag escape '\\'""", tag=token) if isNeg: if cmquery['neg']: @@ -493,10 +493,10 @@ select cardId from cardTags where src = 2 and cardTags.tagId in %s""" % ids2str( field = field.replace("*", "%") value = value.replace("*", "%") args["_ff_%d" % c] = "%"+value+"%" - ids = deck.db.column0(""" + ids = deck.db.list(""" select id from fieldmodels where name like :field escape '\\'""", field=field) sfquery += """ -select factId from fields where fieldModelId in %s and +select fid from fdata where fmid in %s and value like :_ff_%d escape '\\'""" % (ids2str(ids), c) elif type == SEARCH_QA: field = value = '' @@ -555,17 +555,17 @@ select id from facts where cache like :_ff_%d escape '\\'""" % c # Find and replace ########################################################################## -def findReplace(deck, factIds, src, dst, isRe=False, field=None): +def findReplace(deck, fids, src, dst, isRe=False, field=None): "Find and replace fields in a fact." 
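
Most of the queries in find.py splice id lists directly into the SQL via ids2str() rather than binding each id, presumably to stay clear of sqlite's cap on bound parameters when the selection is large. The helper amounts to something like this (a sketch; the real one lives in anki.utils):

    def ids2str(ids):
        # render a list of ids as an SQL tuple: [1, 2, 3] -> "(1,2,3)"
        return "(%s)" % ",".join(str(i) for i in ids)

    query = "select id from cards where fid in %s" % ids2str([101, 102, 103])
    assert query.endswith("in (101,102,103)")
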
# find - s = "select id, factId, value from fields where factId in %s" + s = "select id, fid, value from fdata where fid in %s" if isRe: isRe = re.compile(src) else: s += " and value like :v" if field: - s += " and fieldModelId = :fmid" - rows = deck.db.all(s % ids2str(factIds), + s += " and fmid = :fmid" + rows = deck.db.all(s % ids2str(fids), v="%"+src.replace("%", "%%")+"%", fmid=field) modded = [] @@ -581,8 +581,8 @@ def findReplace(deck, factIds, src, dst, isRe=False, field=None): if val.find(src) != -1] # update if modded: - deck.db.statements( - 'update fields set value = :val where id = :id', modded) + deck.db.executemany( + 'update fdata set value = :val where id = :id', modded) deck.updateCardQACacheFromIds([f['fid'] for f in modded], type="facts") if field: @@ -596,7 +596,7 @@ def findReplace(deck, factIds, src, dst, isRe=False, field=None): def findDuplicates(deck, fmids): data = deck.db.all( - "select factId, value from fields where fieldModelId in %s" % + "select fid, value from fdata where fmid in %s" % ids2str(fmids)) vals = {} for (fid, val) in data: @@ -657,7 +657,7 @@ def findSorted(deck, query, sortKey): if sortKey == "fact": query = """ select cards.id from cards, facts -where cards.factId = facts.id """ +where cards.fid = facts.id """ if ads: query += "and " + ads + " " else: @@ -668,20 +668,20 @@ where cards.factId = facts.id """ else: # field value ret = self.deck.db.all( - "select id, numeric from fieldModels where name = :name", + "select id, numeric from fields where name = :name", name=sortKey[1]) fields = ",".join([str(x[0]) for x in ret]) # if multiple models have the same field, use the first numeric bool numeric = ret[0][1] if numeric: - order = "cast(fields.value as real)" + order = "cast(fdata.value as real)" else: - order = "fields.value collate nocase" + order = "fdata.value collate nocase" if ads: ads = " and " + ads query = ("select cards.id " - "from fields, cards where fields.fieldModelId in (%s) " - "and fields.factId = cards.factId" + ads + + "from fdata, cards where fdata.fmid in (%s) " + "and fdata.fid = cards.fid" + ads + " order by cards.ordinal, %s") % (fields, order) # run the query self.cards = self.deck.db.all(query) diff --git a/anki/graphs.py b/anki/graphs.py index 1b689b458..d1e146891 100644 --- a/anki/graphs.py +++ b/anki/graphs.py @@ -239,11 +239,11 @@ group by day order by day fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) limit = self.endOfDay - (numdays) * 86400 if attr == "created": - res = self.deck.db.column0("select %s from cards where %s >= %f" % + res = self.deck.db.list("select %s from cards where %s >= %f" % (attr, attr, limit)) else: # firstAnswered - res = self.deck.db.column0( + res = self.deck.db.list( "select time/1000 from revlog where rep = 1") for r in res: d = int((r - self.endOfDay) / 86400.0) diff --git a/anki/graves.py b/anki/graves.py index 423ad3a5d..473242368 100644 --- a/anki/graves.py +++ b/anki/graves.py @@ -7,7 +7,6 @@ # - port all the code referencing the old tables import time -from anki.db import * from anki.utils import intTime FACT = 0 @@ -17,19 +16,13 @@ MEDIA = 3 GROUP = 4 GROUPCONFIG = 5 -gravestonesTable = Table( - 'gravestones', metadata, - Column('delTime', Integer, nullable=False), - Column('objectId', Integer, nullable=False), - Column('type', Integer, nullable=False)) - def registerOne(db, type, id): - db.statement("insert into gravestones values (:t, :id, :ty)", - t=intTime(), id=id, ty=type) + db.execute("insert into gravestones values (:t, :id, :ty)", + t=intTime(), id=id, 
ty=type) def registerMany(db, type, ids): - db.statements("insert into gravestones values (:t, :id, :ty)", - [{'t':intTime(), 'id':x, 'ty':type} for x in ids]) + db.executemany("insert into gravestones values (:t, :id, :ty)", + [{'t':intTime(), 'id':x, 'ty':type} for x in ids]) def forgetAll(db): - db.statement("delete from gravestones") + db.execute("delete from gravestones") diff --git a/anki/groups.py b/anki/groups.py index e18028781..c9eadf24a 100644 --- a/anki/groups.py +++ b/anki/groups.py @@ -4,14 +4,6 @@ import simplejson, time from anki.utils import intTime -from anki.db import * - -groupsTable = Table( - 'groups', metadata, - Column('id', Integer, primary_key=True), - Column('modified', Integer, nullable=False, default=intTime), - Column('name', UnicodeText, nullable=False), - Column('confId', Integer, nullable=False)) # maybe define a random cutoff at say +/-30% which controls exit interval # variation - 30% of 1 day is 0.7 or 1.3 so always 1 day; 30% of 4 days is @@ -32,14 +24,6 @@ defaultConf = { 'leechFails': 16, } -groupConfigTable = Table( - 'groupConfig', metadata, - Column('id', Integer, primary_key=True), - Column('modified', Integer, nullable=False, default=intTime), - Column('name', UnicodeText, nullable=False), - Column('config', UnicodeText, nullable=False, - default=unicode(simplejson.dumps(defaultConf)))) - class GroupConfig(object): def __init__(self, name): self.name = name @@ -53,7 +37,3 @@ class GroupConfig(object): def save(self): self._config = simplejson.dumps(self.config) self.modified = intTime() - -mapper(GroupConfig, groupConfigTable, properties={ - '_config': groupConfigTable.c.config, -}) diff --git a/anki/importing/__init__.py b/anki/importing/__init__.py index 9ec9cc480..c650e23b7 100644 --- a/anki/importing/__init__.py +++ b/anki/importing/__init__.py @@ -12,8 +12,8 @@ particular FieldModel, replace it with None. A special number 0 donates a tags field. The same field model should not occur more than once.""" import time -from anki.cards import cardsTable -from anki.facts import factsTable, fieldsTable +#from anki.cards import cardsTable +#from anki.facts import factsTable, fieldsTable from anki.lang import _ from anki.utils import genID, canonifyTags, fieldChecksum from anki.utils import canonifyTags, ids2str @@ -49,7 +49,7 @@ class Importer(object): self.tagsToAdd = u"" def doImport(self): - "Import. Caller must .reset()" + "Import." 
if self.updateKey is not None: return self.doUpdate() random = self.deck.newCardOrder == NEW_CARDS_RANDOM diff --git a/anki/importing/anki10.py b/anki/importing/anki10.py index 8dca38147..9997888a3 100644 --- a/anki/importing/anki10.py +++ b/anki/importing/anki10.py @@ -2,7 +2,7 @@ # Copyright: Damien Elmes # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html -from anki import DeckStorage +from anki import Deck from anki.importing import Importer from anki.sync import SyncClient, SyncServer, copyLocalMedia from anki.lang import _ @@ -57,13 +57,13 @@ class Anki10Importer(Importer): fids = [f[0] for f in res['added-facts']['facts']] self.deck.addTags(fids, self.tagsToAdd) # mark import material as newly added - self.deck.db.statement( + self.deck.db.execute( "update cards set modified = :t where id in %s" % ids2str([x[0] for x in res['added-cards']]), t=time.time()) - self.deck.db.statement( + self.deck.db.execute( "update facts set modified = :t where id in %s" % ids2str([x[0] for x in res['added-facts']['facts']]), t=time.time()) - self.deck.db.statement( + self.deck.db.execute( "update models set modified = :t where id in %s" % ids2str([x['id'] for x in res['added-models']]), t=time.time()) # update total and refresh diff --git a/anki/importing/dingsbums.py b/anki/importing/dingsbums.py index bfdde4234..1804140e2 100644 --- a/anki/importing/dingsbums.py +++ b/anki/importing/dingsbums.py @@ -50,11 +50,9 @@ CHANGES MADE TO LIBANKI: """ from anki.importing import Importer -from anki import DeckStorage +from anki import Deck from anki.facts import Fact -from anki.models import FieldModel -from anki.models import CardModel -from anki.models import Model +from anki.models import Field, Template, Model from anki.lang import _ from xml.sax import make_parser diff --git a/anki/importing/supermemo_xml.py b/anki/importing/supermemo_xml.py index 2db8c2997..6e3e8f1ee 100644 --- a/anki/importing/supermemo_xml.py +++ b/anki/importing/supermemo_xml.py @@ -15,7 +15,7 @@ import re, unicodedata, time #import chardet -from anki.deck import Deck +from anki import Deck class SmartDict(dict): """ diff --git a/anki/media.py b/anki/media.py index 83e9d4182..58a95fa90 100644 --- a/anki/media.py +++ b/anki/media.py @@ -3,260 +3,271 @@ # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html import os, shutil, re, urllib2, time, tempfile, unicodedata, urllib -from anki.db import * from anki.utils import checksum, genID, intTime from anki.lang import _ -# other code depends on this order, so don't reorder -regexps = ("(?i)(\[sound:([^]]+)\])", - "(?i)(]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)") +class MediaRegistry(object): -# Tables -########################################################################## + # other code depends on this order, so don't reorder + regexps = ("(?i)(\[sound:([^]]+)\])", + "(?i)(]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)") -mediaTable = Table( - 'media', metadata, - Column('id', Integer, primary_key=True, nullable=False), - Column('filename', UnicodeText, nullable=False, unique=True), - Column('refcnt', Integer, nullable=False), - Column('modified', Integer, nullable=False), - Column('chksum', UnicodeText, nullable=False, default=u"")) + def __init__(self, deck): + self.deck = deck + self.mediaPrefix = "" + self._mediaDir = None + self._updateMediaDir() -# File handling -########################################################################## + def mediaDir(self, create=False): + if self._mediaDir: + return self._mediaDir + elif create: + 
self._updateMediaDir(True) + return self._mediaDir -def copyToMedia(deck, path): - """Copy PATH to MEDIADIR, and return new filename. + def _updateMediaDir(self, create=False): + if self.mediaPrefix: + dir = os.path.join( + self.mediaPrefix, os.path.basename(self.deck.path)) + else: + dir = self.deck.path + dir = re.sub("(?i)\.(anki)$", ".media", dir) + if create == None: + # don't create, but return dir + return dir + if not os.path.exists(dir): + if not create: + return + # will raise error if we can't create + os.makedirs(dir) + # change to the current dir + os.chdir(dir) + self._mediaDir = dir + # Adding and registering media + ########################################################################## + + def addFile(self, path): + """Copy PATH to MEDIADIR, and return new filename. If a file with the same md5sum exists in the DB, return that. -If a file with the same name exists, return a unique name. -This does not modify the media table.""" - # see if have duplicate contents - newpath = deck.db.scalar( - "select filename from media where chksum = :cs", - cs=checksum(open(path, "rb").read())) - # check if this filename already exists - if not newpath: - base = os.path.basename(path) - mdir = deck.mediaDir(create=True) - newpath = uniquePath(mdir, base) - shutil.copy2(path, newpath) - return os.path.basename(newpath) +If a file with the same name exists, return a unique name.""" + # see if have duplicate contents + csum = self.mediaChecksum(path) + if not csum: + # file was unreadable or didn't exist + return None + file = self.deck.db.scalar( + "select file from media where csum = :cs", + cs=csum) + if not file: + base = os.path.basename(path) + mdir = self.mediaDir(create=True) + file = self.uniquePath(mdir, base) + shutil.copy2(path, file) + self.registerFile(base) + return os.path.basename(file) -def uniquePath(dir, base): - # remove any dangerous characters - base = re.sub(r"[][<>:/\\&]", "", base) - # find a unique name - (root, ext) = os.path.splitext(base) - def repl(match): - n = int(match.group(1)) - return " (%d)" % (n+1) - while True: - path = os.path.join(dir, root + ext) - if not os.path.exists(path): - break - reg = " \((\d+)\)$" - if not re.search(reg, root): - root = root + " (1)" + def registerFile(self, file): + "Add a single file to the media database." + if self.mediaDir(): + csum = self.mediaChecksum(os.path.join(self.mediaDir(), file)) else: - root = re.sub(reg, repl, root) - return path + csum = "" + self.deck.db.execute( + "insert or replace into media values (?, ?, ?)", + file, intTime(), csum) -# DB routines -########################################################################## + def registerText(self, string): + "Add all media in string to the media database." + for f in self.mediaFiles(string): + self.registerFile(f) -def updateMediaCount(deck, file, count=1): - mdir = deck.mediaDir() - if deck.db.scalar( - "select 1 from media where filename = :file", file=file): - deck.db.statement( - "update media set refcnt = refcnt + :c, modified = :t where filename = :file", - file=file, c=count, t=intTime()) - elif count > 0: + def removeUnusedMedia(deck): + ids = deck.s.list("select id from media where size = 0") + for id in ids: + deck.s.statement("insert into mediaDeleted values (:id, :t)", + id=id, t=time.time()) + deck.s.statement("delete from media where size = 0") + + # Moving media + ########################################################################## + + def renameMediaDir(self, oldPath): + "Copy oldPath to our current media dir. 
" + assert os.path.exists(oldPath) + newPath = self.mediaDir(create=None) + # copytree doesn't want the dir to exist try: - sum = unicode( - checksum(open(os.path.join(mdir, file), "rb").read())) + shutil.copytree(oldPath, newPath) except: - sum = u"" - deck.db.statement(""" -insert into media (id, filename, refcnt, modified, chksum) -values (:id, :file, :c, :mod, :sum)""", - id=genID(), file=file, c=count, mod=intTime(), - sum=sum) + # FIXME: should really remove everything in old dir instead of + # giving up + pass -def removeUnusedMedia(deck): - ids = deck.db.column0("select id from media where refcnt = 0") - for id in ids: - deck.db.statement("insert into mediaDeleted values (:id, :t)", - id=id, t=time.time()) - deck.db.statement("delete from media where refcnt = 0") + # Tools + ########################################################################## -# String manipulation -########################################################################## + def mediaChecksum(self, path): + "Return checksum of PATH, or empty string." + try: + return checksum(open(path, "rb").read()) + except: + return "" -def mediaFiles(string, remote=False): - l = [] - for reg in regexps: - for (full, fname) in re.findall(reg, string): - isLocal = not re.match("(https?|ftp)://", fname.lower()) - if not remote and isLocal: - l.append(fname) - elif remote and not isLocal: - l.append(fname) - return l + def uniquePath(self, dir, base): + # remove any dangerous characters + base = re.sub(r"[][<>:/\\&]", "", base) + # find a unique name + (root, ext) = os.path.splitext(base) + def repl(match): + n = int(match.group(1)) + return " (%d)" % (n+1) + while True: + path = os.path.join(dir, root + ext) + if not os.path.exists(path): + break + reg = " \((\d+)\)$" + if not re.search(reg, root): + root = root + " (1)" + else: + root = re.sub(reg, repl, root) + return path -def stripMedia(txt): - for reg in regexps: - txt = re.sub(reg, "", txt) - return txt + # String manipulation + ########################################################################## -def escapeImages(string): - def repl(match): - tag = match.group(1) - fname = match.group(2) - if re.match("(https?|ftp)://", fname): - return tag - return tag.replace( - fname, urllib.quote(fname.encode("utf-8"))) - return re.sub(regexps[1], repl, string) + def mediaFiles(self, string, includeRemote=False): + l = [] + for reg in self.regexps: + for (full, fname) in re.findall(reg, string): + isLocal = not re.match("(https?|ftp)://", fname.lower()) + if isLocal or includeRemote: + l.append(fname) + return l -# Rebuilding DB -########################################################################## + def stripMedia(self, txt): + for reg in self.regexps: + txt = re.sub(reg, "", txt) + return txt -def rebuildMediaDir(deck, delete=False, dirty=True): - mdir = deck.mediaDir() - if not mdir: - return (0, 0) - deck.startProgress(title=_("Check Media DB")) - # set all ref counts to 0 - deck.db.statement("update media set refcnt = 0") - # look through cards for media references - refs = {} - normrefs = {} - def norm(s): - if isinstance(s, unicode): - return unicodedata.normalize('NFD', s) - return s - for (question, answer) in deck.db.all( - "select question, answer from cards"): - for txt in (question, answer): - for f in mediaFiles(txt): - if f in refs: - refs[f] += 1 - else: - refs[f] = 1 + def escapeImages(self, string): + def repl(match): + tag = match.group(1) + fname = match.group(2) + if re.match("(https?|ftp)://", fname): + return tag + return tag.replace( + fname, 
urllib.quote(fname.encode("utf-8"))) + return re.sub(self.regexps[1], repl, string) + + # Rebuilding DB + ########################################################################## + + def rebuildMediaDir(self, delete=False): + mdir = self.mediaDir() + if not mdir: + return (0, 0) + self.deck.startProgress() + # delete all media entries in database + self.deck.db.execute("delete from media") + # look through cards for media references + normrefs = {} + def norm(s): + if isinstance(s, unicode): + return unicodedata.normalize('NFD', s) + return s + for (question, answer) in self.deck.db.all( + "select q, a from cards"): + for txt in (question, answer): + for f in self.mediaFiles(txt): normrefs[norm(f)] = True - # update ref counts - for (file, count) in refs.items(): - updateMediaCount(deck, file, count) - # find unused media - unused = [] - for file in os.listdir(mdir): - path = os.path.join(mdir, file) - if not os.path.isfile(path): - # ignore directories - continue - nfile = norm(file) - if nfile not in normrefs: - unused.append(file) - # optionally delete - if delete: - for f in unused: + self.registerFile(f) + # find unused media + unused = [] + for file in os.listdir(mdir): + path = os.path.join(mdir, file) + if not os.path.isfile(path): + # ignore directories + continue + nfile = norm(file) + if nfile not in normrefs: + unused.append(file) + # optionally delete + if delete: + for f in unused: + path = os.path.join(mdir, f) + os.unlink(path) + nohave = self.deck.db.list( + "select file from media where csum = ''") + self.deck.finishProgress() + return (nohave, unused) + + # Download missing + ########################################################################## + + def downloadMissing(self): + urlbase = self.deck.getVar("mediaURL") + if not urlbase: + return None + mdir = self.deck.mediaDir(create=True) + self.deck.startProgress() + missing = 0 + grabbed = 0 + for c, (f, sum) in enumerate(self.deck.db.all( + "select file, csum from media")): path = os.path.join(mdir, f) - os.unlink(path) - # remove entries in db for unused media - removeUnusedMedia(deck) - # check md5s are up to date - update = [] - for (file, md5) in deck.db.all( - "select filename, chksum from media"): - path = os.path.join(mdir, file) - if not os.path.exists(path): - if md5: - update.append({'f':file, 'sum':u"", 'c':intTime()}) - else: - sum = unicode( - checksum(open(os.path.join(mdir, file), "rb").read())) - if md5 != sum: - update.append({'f':file, 'sum':sum, 'c':intTime()}) - if update: - deck.db.statements(""" -update media set chksum = :sum, modified = :c where filename = :f""", - update) - # update deck and get return info - if dirty: - deck.flushMod() - nohave = deck.db.column0("select filename from media where chksum = ''") - deck.finishProgress() - return (nohave, unused) + if not os.path.exists(path): + try: + rpath = urlbase + f + url = urllib2.urlopen(rpath) + open(f, "wb").write(url.read()) + grabbed += 1 + except: + if sum: + # the file is supposed to exist + self.deck.finishProgress() + return (False, rpath) + else: + # ignore and keep going + missing += 1 + self.deck.updateProgress(label=_("File %d...") % (grabbed+missing)) + self.deck.finishProgress() + return (True, grabbed, missing) -# Download missing -########################################################################## + # Convert remote links to local ones + ########################################################################## -def downloadMissing(deck): - urlbase = deck.getVar("mediaURL") - if not urlbase: - return None - mdir 
= deck.mediaDir(create=True) - deck.startProgress() - missing = 0 - grabbed = 0 - for c, (f, sum) in enumerate(deck.db.all( - "select filename, chksum from media")): - path = os.path.join(mdir, f) - if not os.path.exists(path): + def downloadRemote(self): + mdir = self.deck.mediaDir(create=True) + refs = {} + self.deck.startProgress() + for (question, answer) in self.deck.db.all( + "select question, answer from cards"): + for txt in (question, answer): + for f in mediaFiles(txt, remote=True): + refs[f] = True + + tmpdir = tempfile.mkdtemp(prefix="anki") + failed = [] + passed = [] + for c, link in enumerate(refs.keys()): try: - rpath = urlbase + f - url = urllib2.urlopen(rpath) - open(f, "wb").write(url.read()) - grabbed += 1 + path = os.path.join(tmpdir, os.path.basename(link)) + url = urllib2.urlopen(link) + open(path, "wb").write(url.read()) + newpath = copyToMedia(self.deck, path) + passed.append([link, newpath]) except: - if sum: - # the file is supposed to exist - deck.finishProgress() - return (False, rpath) - else: - # ignore and keep going - missing += 1 - deck.updateProgress(label=_("File %d...") % (grabbed+missing)) - deck.finishProgress() - return (True, grabbed, missing) - -# Convert remote links to local ones -########################################################################## - -def downloadRemote(deck): - mdir = deck.mediaDir(create=True) - refs = {} - deck.startProgress() - for (question, answer) in deck.db.all( - "select question, answer from cards"): - for txt in (question, answer): - for f in mediaFiles(txt, remote=True): - refs[f] = True - - tmpdir = tempfile.mkdtemp(prefix="anki") - failed = [] - passed = [] - for c, link in enumerate(refs.keys()): - try: - path = os.path.join(tmpdir, os.path.basename(link)) - url = urllib2.urlopen(link) - open(path, "wb").write(url.read()) - newpath = copyToMedia(deck, path) - passed.append([link, newpath]) - except: - failed.append(link) - deck.updateProgress(label=_("Download %d...") % c) - for (url, name) in passed: - deck.db.statement( - "update fields set value = replace(value, :url, :name)", - url=url, name=name) - deck.updateProgress(label=_("Updating references...")) - deck.updateProgress(label=_("Updating cards...")) - # rebuild entire q/a cache - for m in deck.models: - deck.updateCardsFromModel(m, dirty=True) - deck.finishProgress() - deck.flushMod() - return (passed, failed) + failed.append(link) + self.deck.updateProgress(label=_("Download %d...") % c) + for (url, name) in passed: + self.deck.db.execute( + "update fields set value = replace(value, :url, :name)", + url=url, name=name) + self.deck.updateProgress(label=_("Updating references...")) + self.deck.updateProgress(label=_("Updating cards...")) + # rebuild entire q/a cache + for m in self.deck.models: + self.deck.updateCardsFromModel(m, dirty=True) + self.deck.finishProgress() + return (passed, failed) diff --git a/anki/models.py b/anki/models.py index 00f7493ce..c272c6888 100644 --- a/anki/models.py +++ b/anki/models.py @@ -2,9 +2,13 @@ # Copyright: Damien Elmes # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html -import time, re, simplejson -from sqlalchemy.ext.orderinglist import ordering_list -from anki.db import * +"""\ +Models load their templates and fields when they are loaded. If you update a +template or field, you should call model.flush(), rather than trying to save +the subobject directly. 
+""" + +import time, re, simplejson, copy as copyMod from anki.utils import genID, canonifyTags, intTime from anki.fonts import toPlatformFont from anki.utils import parseTags, hexifyID, checksum, stripHTML, intTime @@ -13,185 +17,176 @@ from anki.hooks import runFilter from anki.template import render from copy import copy -def alignmentLabels(): - return { - 0: _("Center"), - 1: _("Left"), - 2: _("Right"), - } - -# Field models +# Models ########################################################################## -fieldModelsTable = Table( - 'fieldModels', metadata, - Column('id', Integer, primary_key=True), - Column('ordinal', Integer, nullable=False), - Column('modelId', Integer, ForeignKey('models.id'), nullable=False), - Column('name', UnicodeText, nullable=False), - Column('description', UnicodeText, nullable=False, default=u""), # obsolete - # reused as RTL marker - Column('features', UnicodeText, nullable=False, default=u""), - Column('required', Boolean, nullable=False, default=True), - # if code changes this, it should call deck.updateFieldChecksums() - Column('unique', Boolean, nullable=False, default=True), # sqlite keyword - Column('numeric', Boolean, nullable=False, default=False), - # display - Column('quizFontFamily', UnicodeText, default=u"Arial"), - Column('quizFontSize', Integer, default=20), - Column('quizFontColour', String(7)), - Column('editFontFamily', UnicodeText, default=u"1"), # reused as
 toggle
-    Column('editFontSize', Integer, default=20))
-
-class FieldModel(object):
-    "The definition of one field in a fact."
-
-    def __init__(self, name=u"", required=True, unique=True):
-        self.name = name
-        self.required = required
-        self.unique = unique
-        self.id = genID()
-
-    def copy(self):
-        new = FieldModel()
-        for p in class_mapper(FieldModel).iterate_properties:
-            setattr(new, p.key, getattr(self, p.key))
-        new.id = genID()
-        new.model = None
-        return new
-
-mapper(FieldModel, fieldModelsTable)
-
-# Card models
-##########################################################################
-
-cardModelsTable = Table(
-    'cardModels', metadata,
-    Column('id', Integer, primary_key=True),
-    Column('ordinal', Integer, nullable=False),
-    Column('modelId', Integer, ForeignKey('models.id'), nullable=False),
-    Column('name', UnicodeText, nullable=False),
-    Column('description', UnicodeText, nullable=False, default=u""), # obsolete
-    Column('active', Boolean, nullable=False, default=True),
-    # formats: question/answer/last(not used)
-    Column('qformat', UnicodeText, nullable=False),
-    Column('aformat', UnicodeText, nullable=False),
-    Column('lformat', UnicodeText),
-    # question/answer editor format (not used yet)
-    Column('qedformat', UnicodeText),
-    Column('aedformat', UnicodeText),
-    Column('questionInAnswer', Boolean, nullable=False, default=False),
-    # unused
-    Column('questionFontFamily', UnicodeText, default=u"Arial"),
-    Column('questionFontSize', Integer, default=20),
-    Column('questionFontColour', String(7), default=u"#000000"),
-    # used for both question & answer
-    Column('questionAlign', Integer, default=0),
-    # ununsed
-    Column('answerFontFamily', UnicodeText, default=u"Arial"),
-    Column('answerFontSize', Integer, default=20),
-    Column('answerFontColour', String(7), default=u"#000000"),
-    Column('answerAlign', Integer, default=0),
-    Column('lastFontFamily', UnicodeText, default=u"Arial"),
-    Column('lastFontSize', Integer, default=20),
-    # used as background colour
-    Column('lastFontColour', String(7), default=u"#FFFFFF"),
-    Column('editQuestionFontFamily', UnicodeText, default=None),
-    Column('editQuestionFontSize', Integer, default=None),
-    Column('editAnswerFontFamily', UnicodeText, default=None),
-    Column('editAnswerFontSize', Integer, default=None),
-    # empty answer
-    Column('allowEmptyAnswer', Boolean, nullable=False, default=True),
-    Column('typeAnswer', UnicodeText, nullable=False, default=u""))
-
-class CardModel(object):
-    """Represents how to generate the front and back of a card."""
-    def __init__(self, name=u"", qformat=u"q", aformat=u"a", active=True):
-        self.name = name
-        self.qformat = qformat
-        self.aformat = aformat
-        self.active = active
-        self.id = genID()
-
-    def copy(self):
-        new = CardModel()
-        for p in class_mapper(CardModel).iterate_properties:
-            setattr(new, p.key, getattr(self, p.key))
-        new.id = genID()
-        new.model = None
-        return new
-
-mapper(CardModel, cardModelsTable)
-
-def formatQA(cid, mid, fact, tags, cm, deck):
-    "Return a dict of {id, question, answer}"
-    d = {'id': cid}
-    fields = {}
-    for (k, v) in fact.items():
-        fields["text:"+k] = stripHTML(v[1])
-        if v[1]:
-            fields[k] = '<span class="fm%s">%s</span>' % (
-                hexifyID(v[0]), v[1])
-        else:
-            fields[k] = u""
-    fields['tags'] = tags[0]
-    fields['Tags'] = tags[0]
-    fields['modelTags'] = tags[1]
-    fields['cardModel'] = tags[2]
-    # render q & a
-    ret = []
-    for (type, format) in (("question", cm.qformat),
-                           ("answer", cm.aformat)):
-        # convert old style
-        format = re.sub("%\((.+?)\)s", "{{\\1}}", format)
-        # allow custom rendering functions & info
-        fields = runFilter("prepareFields", fields, cid, mid, fact, tags, cm, deck)
-        html = render(format, fields)
-        d[type] = runFilter("formatQA", html, type, cid, mid, fact, tags, cm, deck)
-    return d
-
-# Model table
-##########################################################################
-
-modelsTable = Table(
-    'models', metadata,
-    Column('id', Integer, primary_key=True),
-    Column('modified', Integer, nullable=False, default=intTime),
-    Column('name', UnicodeText, nullable=False),
-    # currently unused
-    Column('config', UnicodeText, nullable=False, default=u"")
-)
+defaultConf = {
+}
 
 class Model(object):
-    "Defines the way a fact behaves, what fields it can contain, etc."
-    def __init__(self, name=u""):
-        self.name = name
-        self.id = genID()
 
-    def setModified(self):
-        self.modified = intTime()
+    def __init__(self, deck, id=None):
+        self.deck = deck
+        if id:
+            self.id = id
+            self.load()
+        else:
+            self.id = genID()
+            self.name = u""
+            self.mod = intTime()
+            self.conf = defaultConf.copy()
+            self.fields = []
+            self.templates = []
 
-    def addFieldModel(self, field):
-        "Add a field model. Don't call this directly."
-        self.fieldModels.append(field)
-        s = object_session(self)
-        if s:
-            s.flush()
+    def load(self):
+        (self.mod,
+         self.name,
+         self.conf) = self.deck.db.first("""
+select mod, name, conf from models where id = ?""", self.id)
+        self.conf = simplejson.loads(self.conf)
+        self.loadFields()
+        self.loadTemplates()
 
-    def addCardModel(self, card):
-        "Add a card model. Don't call this directly."
-        self.cardModels.append(card)
-        s = object_session(self)
-        if s:
-            s.flush()
+    def flush(self):
+        self.mod = intTime()
+        self.deck.db.execute("""
+insert or replace into models values (?, ?, ?, ?)""",
+                             self.id, self.mod, self.name,
+                             simplejson.dumps(self.conf))
+        [f._flush() for f in self.fields]
+        [t._flush() for t in self.templates]
 
-mapper(Model, modelsTable, properties={
-    'fieldModels': relation(FieldModel, backref='model',
-                             collection_class=ordering_list('ordinal'),
-                             order_by=[fieldModelsTable.c.ordinal],
-                            cascade="all, delete-orphan"),
-    'cardModels': relation(CardModel, backref='model',
-                           collection_class=ordering_list('ordinal'),
-                           order_by=[cardModelsTable.c.ordinal],
-                           cascade="all, delete-orphan"),
-       })
+    def updateCache(self):
+        self.deck.updateCache([self.id], "model")
+
+    # Fields
+    ##################################################
+
+    def loadFields(self):
+        sql = "select * from fields where mid = ? order by ord"
+        self.fields = [Field(self.deck, data)
+                       for data in self.deck.db.all(sql, self.id)]
+
+    def addField(self, field):
+        self.deck.modSchema()
+        field.mid = self.id
+        field.ord = len(self.fields)
+        self.fields.append(field)
+
+    def fieldMap(self):
+        "Mapping of field name -> (fmid, ord)."
+        return dict([(f.name, (f.id, f.ord, f.conf)) for f in self.fields])
+
+    # Templates
+    ##################################################
+
+    def loadTemplates(self):
+        sql = "select * from templates where mid = ? order by ord"
+        self.templates = [Template(self.deck, data)
+                          for data in self.deck.db.all(sql, self.id)]
+
+    def addTemplate(self, template):
+        self.deck.modSchema()
+        template.mid = self.id
+        template.ord = len(self.templates)
+        self.templates.append(template)
+
+    # Copying
+    ##################################################
+
+    def copy(self):
+        "Copy, flush and return."
+        new = Model(self.deck, self.id)
+        new.id = genID()
+        new.name += _(" copy")
+        for f in new.fields:
+            f.id = genID()
+            f.mid = new.id
+        for t in new.templates:
+            t.id = genID()
+            t.mid = new.id
+        new.flush()
+        return new
+
+# Field model object
+##########################################################################
+
+defaultFieldConf = {
+    'rtl': False, # features
+    'required': False,
+    'unique': False,
+    'font': "Arial",
+    'editSize': 20,
+    'quizSize': 20,
+    'quizColour': "#fff",
+    'pre': True,
+}
+
+class Field(object):
+
+    def __init__(self, deck, data=None):
+        self.deck = deck
+        if data:
+            self.initFromData(data)
+        else:
+            self.id = genID()
+            self.numeric = 0
+            self.conf = defaultFieldConf.copy()
+
+    def initFromData(self, data):
+        (self.id,
+         self.mid,
+         self.ord,
+         self.name,
+         self.numeric,
+         self.conf) = data
+        self.conf = simplejson.loads(self.conf)
+
+    def _flush(self):
+        self.deck.db.execute("""
+insert or replace into fields values (?, ?, ?, ?, ?, ?)""",
+                             self.id, self.mid, self.ord,
+                             self.name, self.numeric,
+                             simplejson.dumps(self.conf))
+
+# Template object
+##########################################################################
+
+# FIXME: change typeAnswer to field id
+
+defaultTemplateConf = {
+    'hideQ': False,
+    'align': 0,
+    'bg': "#000",
+    'allowEmptyAns': None,
+    'typeAnswer': None,
+}
+
+class Template(object):
+
+    def __init__(self, deck, data=None):
+        self.deck = deck
+        if data:
+            self.initFromData(data)
+        else:
+            self.id = genID()
+            self.active = True
+            self.conf = defaultTemplateConf.copy()
+
+    def initFromData(self, data):
+        (self.id,
+         self.mid,
+         self.ord,
+         self.name,
+         self.active,
+         self.qfmt,
+         self.afmt,
+         self.conf) = data
+        self.conf = simplejson.loads(self.conf)
+
+    def _flush(self):
+        self.deck.db.execute("""
+insert or replace into templates values (?, ?, ?, ?, ?, ?, ?, ?)""",
+                             self.id, self.mid, self.ord, self.name,
+                             self.active, self.qfmt, self.afmt,
+                             simplejson.dumps(self.conf))
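
The new models.py docstring above describes the intended flow: build the model and its
subobjects in memory, then persist everything with a single flush(). A rough usage sketch
(not part of the patch; deck is assumed to be an already-open deck, and the field/template
names are purely illustrative):

    # sketch only: exercises the Model/Field/Template flush pattern above
    m = Model(deck)            # no id given, so a fresh model is created
    m.name = u"Vocab"
    f = Field(deck)
    f.name = u"Word"
    m.addField(f)              # assigns f.mid and f.ord
    t = Template(deck)
    t.name = u"Recognition"
    t.qfmt = "{{Word}}"
    t.afmt = "{{Word}}"
    m.addTemplate(t)           # assigns t.mid and t.ord
    m.flush()                  # writes the model, then each field and template
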
diff --git a/anki/revlog.py b/anki/revlog.py
index bf8d7cf81..2c1d47f11 100644
--- a/anki/revlog.py
+++ b/anki/revlog.py
@@ -3,7 +3,6 @@
 # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
 
 import time
-from anki.db import *
 
 # Flags: 0=standard review, 1=reschedule due to cram, drill, etc
 # Rep: Repetition number. The same number may appear twice if a card has been
@@ -12,20 +11,8 @@ from anki.db import *
 # We store the times in integer milliseconds to avoid an extra index on the
 # primary key.
 
-revlogTable = Table(
-    'revlog', metadata,
-    Column('time', Integer, nullable=False, primary_key=True),
-    Column('cardId', Integer, nullable=False),
-    Column('ease', Integer, nullable=False),
-    Column('rep', Integer, nullable=False),
-    Column('lastInterval', Integer, nullable=False),
-    Column('interval', Integer, nullable=False),
-    Column('factor', Integer, nullable=False),
-    Column('userTime', Integer, nullable=False),
-    Column('flags', Integer, nullable=False, default=0))
-
 def logReview(db, card, ease, flags=0):
-    db.statement("""
+    db.execute("""
 insert into revlog values (
 :created, :cardId, :ease, :rep, :lastInterval, :interval, :factor,
 :userTime, :flags)""",
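
For reference, sched.py below calls this as logReview(self.db, card, ease, 0) after saving
the card. A minimal sketch outside the scheduler (deck and card are assumed to exist;
times are stored as integer milliseconds, per the comments above):

    # sketch: record a standard review answered with ease 3; flags defaults to 0
    logReview(deck.db, card, 3)
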
diff --git a/anki/sched.py b/anki/sched.py
index 065297f05..c80dee555 100644
--- a/anki/sched.py
+++ b/anki/sched.py
@@ -5,8 +5,7 @@
 import time, datetime, simplejson, random
 from operator import itemgetter
 from heapq import *
-from anki.db import *
-from anki.cards import Card
+#from anki.cards import Card
 from anki.utils import parseTags, ids2str
 from anki.lang import _
 from anki.consts import *
@@ -26,12 +25,10 @@ class Scheduler(object):
         self.checkDay()
         id = self.getCardId()
         if id:
-            card = Card()
-            assert card.fromDB(self.db, id)
-            return card
+            return self.deck.getCard(id)
 
     def reset(self):
-        self.resetConfig()
+        self.resetConf()
         t = time.time()
         self.resetLearn()
         print "lrn %0.2fms" % ((time.time() - t)*1000); t = time.time()
@@ -53,7 +50,7 @@ class Scheduler(object):
             self.answerLearnCard(card, ease)
         else:
             raise Exception("Invalid queue")
-        card.toDB(self.db)
+        card.flushSched()
 
     def counts(self):
         # FIXME: should learn count include new cards due today, or be separate?
@@ -113,7 +110,7 @@ queue = 2 %s order by due limit %d""" % (self.newOrder(), self.groupLimit('new')
             return self.newQueue.pop()[0]
 
     def newOrder(self):
-        return (",ordinal", "")[self.deck.qconf['newTodayOrder']]
+        return (",ord", "")[self.deck.qconf['newTodayOrder']]
 
     def updateNewCardRatio(self):
         if self.deck.qconf['newCardSpacing'] == NEW_CARDS_DISTRIBUTE:
@@ -172,7 +169,7 @@ limit %d""" % self.learnLimit, lim=self.dayCutoff)
             card.due = time.time() + conf['delays'][card.grade]*60
 
     def learnConf(self, card):
-        conf = self.configForCard(card)
+        conf = self.confForCard(card)
         if card.type == 2:
             return conf['new']
         else:
@@ -287,7 +284,7 @@ queue = 1 %s and due < :lim order by %s limit %d""" % (
             self.answerPreSave(card, ease)
         # save
         card.due = card.due
-        card.toDB(self.db)
+        card.saveSched()
         # review history
         print "make sure flags is set correctly when reviewing early"
         logReview(self.db, card, ease, 0)
@@ -309,11 +306,10 @@ queue = 1 %s and due < :lim order by %s limit %d""" % (
             card.successive += 1
         # if not card.firstAnswered:
         #     card.firstAnswered = time.time()
-        card.setModified()
 
     def spaceCards(self, card):
         new = time.time() + self.newSpacing
-        self.db.statement("""
+        self.db.execute("""
 update cards set
 due = (case
 when queue = 1 then due + 86400 * (case
@@ -323,13 +319,13 @@ when queue = 1 then due + 86400 * (case
 when queue = 2 then :new
 end),
 modified = :now
-where id != :id and factId = :factId
+where id != :id and fid = :fid
 and due < :cut
 and queue between 1 and 2""",
-                         id=card.id, now=time.time(), factId=card.factId,
+                         id=card.id, now=time.time(), fid=card.fid,
                          cut=self.dayCutoff, new=new, rev=self.revSpacing)
         # update local cache of seen facts
-        self.spacedFacts[card.factId] = new
+        self.spacedFacts[card.fid] = new
 
     # Interval management
     ##########################################################################
@@ -444,39 +440,36 @@ and queue between 1 and 2""",
             (fmax - no) % (max(fmax/2, 1)) == 0)
 
     def handleLeech(self, card):
-        self.refreshSession()
         scard = self.cardFromId(card.id, True)
         tags = scard.fact.tags
         tags = addTags("Leech", tags)
         scard.fact.tags = canonifyTags(tags)
         scard.fact.setModified(textChanged=True, deck=self)
         self.updateFactTags([scard.fact.id])
-        self.db.flush()
         self.db.expunge(scard)
         if self.getBool('suspendLeeches'):
             self.suspendCards([card.id])
         self.reset()
-        self.refreshSession()
 
     # Tools
     ##########################################################################
 
-    def resetConfig(self):
-        "Update group config cache."
-        self.groupConfigs = dict(self.db.all("select id, confId from groups"))
-        self.configCache = {}
+    def resetConf(self):
+        "Update group conf cache."
+        self.groupConfs = dict(self.db.all("select id, gcid from groups"))
+        self.confCache = {}
 
-    def configForCard(self, card):
-        id = self.groupConfigs[card.groupId]
-        if id not in self.configCache:
-            self.configCache[id] = simplejson.loads(
-                self.db.scalar("select config from groupConfig where id = :id",
+    def confForCard(self, card):
+        id = self.groupConfs[card.gid]
+        if id not in self.confCache:
+            self.confCache[id] = simplejson.loads(
+                self.db.scalar("select conf from gconf where id = :id",
                                id=id))
-        return self.configCache[id]
+        return self.confCache[id]
 
     def resetSchedBuried(self):
         "Put temporarily suspended cards back into play."
-        self.db.statement(
+        self.db.execute(
             "update cards set queue = type where queue = -3")
 
     def groupLimit(self, type):
@@ -484,7 +477,7 @@ and queue between 1 and 2""",
         if not l:
             # everything
             return ""
-        return " and groupId in %s" % ids2str(l)
+        return " and gid in %s" % ids2str(l)
 
     # Daily cutoff
     ##########################################################################
@@ -538,7 +531,7 @@ select count() from cards c where queue = 1 and due > :now
             self.revQueue = self.db.all(
                 self.cardLimit(
                 "revActive", "revInactive", """
-select id, factId from cards c where queue = 1 and due > :lim
+select id, fid from cards c where queue = 1 and due > :lim
 order by due limit %d""" % self.queueLimit), lim=self.dayCutoff)
             self.revQueue.reverse()
 
diff --git a/anki/stats.py b/anki/stats.py
index 9c4d2eace..6951e1ec9 100644
--- a/anki/stats.py
+++ b/anki/stats.py
@@ -4,7 +4,6 @@
 
 import time, sys, os, datetime
 import anki, anki.utils
-from anki.db import *
 from anki.lang import _, ngettext
 from anki.utils import canonifyTags, ids2str
 from anki.hooks import runFilter
diff --git a/anki/stdmodels.py b/anki/stdmodels.py
index d3e7c7188..e3658c784 100644
--- a/anki/stdmodels.py
+++ b/anki/stdmodels.py
@@ -2,48 +2,54 @@
 # Copyright: Damien Elmes 
 # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
 
-"""\
-Standard Models.
-==============================================================
-
-Plugins can add to the 'models' dict to provide more standard
-models.
-"""
-
-from anki.models import Model, CardModel, FieldModel
+from anki.models import Model, Template, Field
 from anki.lang import _
 
-models = {}
-
-def byName(name):
-    fn = models.get(name)
-    if fn:
-        return fn()
-    raise ValueError("No such model available!")
-
-def names():
-    return models.keys()
+models = []
 
 # Basic
 ##########################################################################
 
-def BasicModel():
-    m = Model(_('Basic'))
-    m.addFieldModel(FieldModel(u'Front', True, True))
-    m.addFieldModel(FieldModel(u'Back', False, False))
-    m.addCardModel(CardModel(u'Forward', u'%(Front)s', u'%(Back)s'))
-    m.addCardModel(CardModel(u'Reverse', u'%(Back)s', u'%(Front)s',
-                             active=False))
+def BasicModel(deck):
+    m = Model(deck)
+    m.name = _("Basic")
+    fm = Field(deck)
+    fm.name = _("Front")
+    fm.conf['required'] = True
+    fm.conf['unique'] = True
+    m.addField(fm)
+    fm = Field(deck)
+    fm.name = _("Back")
+    m.addField(fm)
+    t = Template(deck)
+    t.name = _("Forward")
+    t.qfmt = "{{" + _("Front") + "}}"
+    t.afmt = "{{" + _("Back") + "}}"
+    m.addTemplate(t)
+    t = Template(deck)
+    t.name = _("Reverse")
+    t.qfmt = "{{" + _("Back") + "}}"
+    t.afmt = "{{" + _("Front") + "}}"
+    t.active = False
+    m.addTemplate(t)
     return m
 
-models['Basic'] = BasicModel
+models.append(BasicModel)
 
 # Recovery
 ##########################################################################
 
-def RecoveryModel():
-    m = Model(_('Recovery'))
-    m.addFieldModel(FieldModel(u'Question', False, False))
-    m.addFieldModel(FieldModel(u'Answer', False, False))
-    m.addCardModel(CardModel(u'Single', u'{{{Question}}}', u'{{{Answer}}}'))
+def RecoveryModel(deck):
+    m = Model(deck)
+    m.name = _("Recovery")
+    fm = Field(deck)
+    fm.name = _("Question")
+    m.addField(fm)
+    fm = Field(deck)
+    fm.name = _("Back")
+    m.addField(fm)
+    t = Template(deck)
+    t.name = _("Forward")
+    t.qfmt = "{{" + _("Question") + "}}"
+    t.afmt = "{{" + _("Back") + "}}"
+    m.addTemplate(t)
     return m
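
With the models list replacing the old name->factory dict, the standard model factories
now take the open deck, and storage.py below attaches one on deck creation. A minimal
sketch (deck assumed to be an open deck):

    # sketch: build the stock Basic model and attach it to a deck
    m = BasicModel(deck)
    deck.addModel(m)    # addModel as called from storage.py below
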
diff --git a/anki/upgrade.py b/anki/storage.py
similarity index 51%
rename from anki/upgrade.py
rename to anki/storage.py
index b59a7abce..3674fd747 100644
--- a/anki/upgrade.py
+++ b/anki/storage.py
@@ -4,11 +4,47 @@
 
 DECK_VERSION = 100
 
-import time, simplejson
-from anki.db import *
+import os, time, simplejson
 from anki.lang import _
-from anki.media import rebuildMediaDir
+#from anki.media import rebuildMediaDir
 from anki.utils import intTime
+from anki.db import DB
+from anki.deck import _Deck
+import anki.groups
+from anki.stdmodels import BasicModel
+
+def Deck(path, queue=True):
+    "Open a new or existing deck. Path must be unicode."
+    path = os.path.abspath(path)
+    create = not os.path.exists(path)
+    # connect
+    db = DB(path)
+    if create:
+        ver = _createDB(db)
+    else:
+        ver = _upgradeSchema(db)
+    db.execute("pragma cache_size = 20000")
+    # add db to deck and do any remaining upgrades
+    deck = _Deck(db)
+    if ver < DECK_VERSION:
+        _upgradeDeck(deck, ver)
+    elif create:
+        deck.addModel(BasicModel(deck))
+        deck.save()
+    if not queue:
+        return deck
+    # rebuild queue
+    deck.reset()
+    return deck
+
+def _createDB(db):
+    db.execute("pragma page_size = 4096")
+    db.execute("pragma legacy_file_format = 0")
+    db.execute("vacuum")
+    _addSchema(db)
+    _updateIndices(db)
+    db.execute("analyze")
+    return DECK_VERSION
 
 def moveTable(s, table):
     sql = s.scalar(
@@ -18,19 +54,16 @@ def moveTable(s, table):
     s.execute("insert into %s2 select * from %s" % (table, table))
     s.execute("drop table "+table)
 
-def upgradeSchema(engine, s):
+def _upgradeSchema(db):
     "Alter tables prior to ORM initialization."
     try:
-        ver = s.scalar("select version from deck limit 1")
+        ver = db.scalar("select version from deck")
     except:
-        ver = s.scalar("select version from decks limit 1")
+        ver = db.scalar("select version from decks")
     if ver < 65:
         raise Exception("oldDeckVersion")
     if ver < 99:
-        # fields
-        ###########
-        s.execute(
-            "alter table fields add column chksum text not null default ''")
+        raise Exception("upgrade")
         # cards
         ###########
         moveTable(s, "cards")
@@ -83,9 +116,20 @@ cast(modified as int), tags, cache from facts2""")
 insert or ignore into media select id, filename, size, cast(created as int),
 originalPath from media2""")
         s.execute("drop table media2")
-        # deck
+        # longer migrations
         ###########
+
+
+
         migrateDeck(s, engine)
+        migrateFields(s, engine)
+        # # fields
+        # ###########
+        # db.execute(
+        #     "alter table fields add column csum text not null default ''")
+
+
+
         # models
         ###########
         moveTable(s, "models")
@@ -141,91 +185,64 @@ utcOffset, "", "", "" from decks""")
     s.execute("drop table decks")
     s.execute("drop table deckVars")
 
-def updateIndices(db):
-    "Add indices to the DB."
-    # sync summaries
-    db.execute("""
-create index if not exists ix_cards_modified on cards
-(modified)""")
-    db.execute("""
-create index if not exists ix_facts_modified on facts
-(modified)""")
-    # card spacing
-    db.execute("""
-create index if not exists ix_cards_factId on cards (factId)""")
-    # fields
-    db.execute("""
-create index if not exists ix_fields_factId on fields (factId)""")
-    db.execute("""
-create index if not exists ix_fields_chksum on fields (chksum)""")
-    # media
-    db.execute("""
-create index if not exists ix_media_chksum on media (chksum)""")
-    # deletion tracking
-    db.execute("""
-create index if not exists ix_gravestones_delTime on gravestones (delTime)""")
-
-def upgradeDeck(deck):
+def _upgradeDeck(deck, version):
     "Upgrade deck to the latest version."
-    if deck.version < DECK_VERSION:
+    print version, DECK_VERSION
+    if version < DECK_VERSION:
         prog = True
         deck.startProgress()
         deck.updateProgress(_("Upgrading Deck..."))
         oldmod = deck.modified
     else:
         prog = False
-    if deck.version < 100:
+    if version < 100:
         # update dynamic indices given we don't use priority anymore
         for d in ("intervalDesc", "intervalAsc", "randomOrder",
                   "dueAsc", "dueDesc"):
-            deck.db.statement("drop index if exists ix_cards_%s2" % d)
-            deck.db.statement("drop index if exists ix_cards_%s" % d)
+            deck.db.execute("drop index if exists ix_cards_%s2" % d)
+            deck.db.execute("drop index if exists ix_cards_%s" % d)
         # remove old views
         for v in ("failedCards", "revCardsOld", "revCardsNew",
                   "revCardsDue", "revCardsRandom", "acqCardsRandom",
                   "acqCardsOld", "acqCardsNew"):
-            deck.db.statement("drop view if exists %s" % v)
-        # remove the expensive value cache
-        deck.db.statement("drop index if exists ix_fields_value")
+            deck.db.execute("drop view if exists %s" % v)
         # add checksums and index
         deck.updateAllFieldChecksums()
         # this was only used for calculating average factor
-        deck.db.statement("drop index if exists ix_cards_factor")
+        deck.db.execute("drop index if exists ix_cards_factor")
         # remove stats, as it's all in the revlog now
-        deck.db.statement("drop table if exists stats")
+        deck.db.execute("drop table if exists stats")
         # migrate revlog data to new table
-        deck.db.statement("""
+        deck.db.execute("""
 insert or ignore into revlog select
 cast(time*1000 as int), cardId, ease, reps,
 cast(lastInterval as int), cast(nextInterval as int),
 cast(nextFactor*1000 as int), cast(min(thinkingTime, 60)*1000 as int),
 0 from reviewHistory""")
-        deck.db.statement("drop table reviewHistory")
+        deck.db.execute("drop table reviewHistory")
         # convert old ease0 into ease1
-        deck.db.statement("update revlog set ease = 1 where ease = 0")
+        deck.db.execute("update revlog set ease = 1 where ease = 0")
         # remove priority index
-        deck.db.statement("drop index if exists ix_cards_priority")
+        deck.db.execute("drop index if exists ix_cards_priority")
         # suspended cards don't use ranges anymore
         deck.db.execute("update cards set queue=-1 where queue between -3 and -1")
         deck.db.execute("update cards set queue=-2 where queue between 3 and 5")
         deck.db.execute("update cards set queue=-3 where queue between 6 and 8")
-        # don't need an index on fieldModelId
-        deck.db.statement("drop index if exists ix_fields_fieldModelId")
         # update schema time
-        deck.db.statement("update deck set schemaMod = :t", t=intTime())
+        deck.db.execute("update deck set schemaMod = :t", t=intTime())
         # remove queueDue as it's become dynamic, and type index
-        deck.db.statement("drop index if exists ix_cards_queueDue")
-        deck.db.statement("drop index if exists ix_cards_type")
+        deck.db.execute("drop index if exists ix_cards_queueDue")
+        deck.db.execute("drop index if exists ix_cards_type")
         # remove old deleted tables
         for t in ("cards", "facts", "models", "media"):
-            deck.db.statement("drop table if exists %sDeleted" % t)
+            deck.db.execute("drop table if exists %sDeleted" % t)
         # finally, update indices & optimize
         updateIndices(deck.db)
         # rewrite due times for new cards
-        deck.db.statement("""
+        deck.db.execute("""
 update cards set due = (select pos from facts where factId = facts.id) where type=2""")
         # convert due cards into day-based due
-        deck.db.statement("""
+        deck.db.execute("""
 update cards set due = cast(
 (case when due < :stamp then 0 else 1 end) +
 ((due-:stamp)/86400) as int)+:today where type
@@ -240,14 +257,170 @@ between 0 and 1""", stamp=deck.sched.dayCutoff, today=deck.sched.today)
         deck.config['nextFactPos'] = deck.db.scalar("select max(pos) from facts")+1
         deck.flushConfig()
         # add default config
-        import deck as deckMod
-        deckMod.DeckStorage._addConfig(deck.engine)
 
         deck.updateDynamicIndices()
         deck.db.execute("vacuum")
         deck.db.execute("analyze")
-        deck.version = 100
+        deck.db.execute("update deck set version = ?", DECK_VERSION)
         deck.db.commit()
     if prog:
         assert deck.modified == oldmod
         deck.finishProgress()
+
+def _addSchema(db):
+    db.executescript("""
+create table if not exists deck (
+    id              integer primary key,
+    created         integer not null,
+    mod             integer not null,
+    schema          integer not null,
+    version         integer not null,
+    syncName        text not null,
+    lastSync        integer not null,
+    utcOffset       integer not null,
+    qconf           text not null,
+    conf            text not null,
+    data            text not null
+);
+
+create table if not exists cards (
+    id              integer primary key,
+    fid             integer not null,
+    tid             integer not null,
+    gid             integer not null,
+    mod             integer not null,
+    q               text not null,
+    a               text not null,
+    ord             integer not null,
+    type            integer not null,
+    queue           integer not null,
+    due             integer not null,
+    interval        integer not null,
+    factor          integer not null,
+    reps            integer not null,
+    streak          integer not null,
+    lapses          integer not null,
+    grade           integer not null,
+    cycles          integer not null
+);
+
+create table if not exists facts (
+    id              integer primary key,
+    mid             integer not null,
+    mod             integer not null,
+    pos             integer not null,
+    tags            text not null,
+    cache           text not null
+);
+
+create table if not exists models (
+    id              integer primary key,
+    mod             integer not null,
+    name            text not null,
+    conf            text not null
+);
+
+create table if not exists fields (
+    id              integer primary key,
+    mid             integer not null,
+    ord             integer not null,
+    name            text not null,
+    numeric         integer not null,
+    conf            text not null
+);
+
+create table if not exists templates (
+    id              integer primary key,
+    mid             integer not null,
+    ord             integer not null,
+    name            text not null,
+    active          integer not null,
+    qfmt            text not null,
+    afmt            text not null,
+    conf            text not null
+);
+
+create table if not exists fdata (
+    fid             integer not null,
+    fmid            integer not null,
+    ord             integer not null,
+    val             text not null,
+    csum            text not null
+);
+
+create table if not exists gravestones (
+    delTime         integer not null,
+    objectId        integer not null,
+    type            integer not null
+);
+
+create table if not exists gconf (
+    id              integer primary key,
+    mod             integer not null,
+    name            text not null,
+    conf            text not null
+);
+
+create table if not exists groups (
+    id              integer primary key autoincrement,
+    mod             integer not null,
+    name            text not null,
+    gcid            integer not null
+);
+
+create table if not exists media (
+    file            text primary key,
+    mod             integer not null,
+    csum            text not null
+);
+
+create table if not exists revlog (
+    time            integer primary key,
+    cid             integer not null,
+    ease            integer not null,
+    rep             integer not null,
+    lastInt         integer not null,
+    interval        integer not null,
+    factor          integer not null,
+    userTime        integer not null,
+    flags           integer not null
+);
+
+create table if not exists tags (
+    id              integer primary key,
+    mod             integer not null,
+    name            text not null collate nocase unique
+);
+
+insert or ignore into deck
+values(1,%(t)s,%(t)s,%(t)s,%(v)s,'',0,-2,'', '', '');
+""" % ({'t': intTime(), 'v':DECK_VERSION}))
+    import anki.deck
+    db.execute("update deck set qconf = ?, conf = ?, data = ?",
+               simplejson.dumps(anki.deck.defaultQconf),
+               simplejson.dumps(anki.deck.defaultConf),
+               "{}")
+    db.execute(
+        "insert or ignore into gconf values (1, ?, ?, ?)""",
+        intTime(), _("Default Config"),
+        simplejson.dumps(anki.groups.defaultConf))
+    db.execute(
+        "insert or ignore into groups values (1, ?, ?, 1)",
+        intTime(), _("Default Group"))
+
+def _updateIndices(db):
+    "Add indices to the DB."
+    db.executescript("""
+-- sync summaries
+create index if not exists ix_cards_mod on cards (mod);
+create index if not exists ix_facts_mod on facts (mod);
+-- card spacing
+create index if not exists ix_cards_fid on cards (fid);
+-- fact data
+create index if not exists ix_fdata_fid on fdata (fid);
+create index if not exists ix_fdata_csum on fdata (csum);
+-- media
+create index if not exists ix_media_csum on media (csum);
+-- deletion tracking
+create index if not exists ix_gravestones_delTime on gravestones (delTime);
+""")
diff --git a/anki/sync.py b/anki/sync.py
index 1c8fe372a..16c1b87f8 100644
--- a/anki/sync.py
+++ b/anki/sync.py
@@ -6,13 +6,12 @@ import zlib, re, urllib, urllib2, socket, simplejson, time, shutil
 import os, base64, httplib, sys, tempfile, httplib, types
 from datetime import date
 import anki, anki.deck, anki.cards
-from anki.db import sqlite
 from anki.errors import *
-from anki.models import Model, FieldModel, CardModel
-from anki.facts import Fact, Field
-from anki.cards import Card
+#from anki.models import Model, Field, Template
+#from anki.facts import Fact
+#from anki.cards import Card
 from anki.utils import ids2str, hexifyID, checksum
-from anki.media import mediaFiles
+#from anki.media import mediaFiles
 from anki.lang import _
 from hooks import runHook
 
@@ -334,7 +333,7 @@ class SyncTools(object):
             self.applyDict(local, model)
             self.mergeFieldModels(local, fms)
             self.mergeCardModels(local, cms)
-        self.deck.db.statement(
+        self.deck.db.execute(
             "delete from modelsDeleted where modelId in %s" %
             ids2str([m['id'] for m in models]))
 
@@ -457,7 +456,7 @@ insert into fields
 (id, factId, fieldModelId, ordinal, value, chksum)
 values
 (:id, :factId, :fieldModelId, :ordinal, :value, :chksum)""", dlist)
-        self.deck.db.statement(
+        self.deck.db.execute(
             "delete from factsDeleted where factId in %s" %
             ids2str([f[0] for f in facts]))
 
@@ -535,7 +534,7 @@ values
 :matureEase1, :matureEase2, :matureEase3, :matureEase4, :yesCount,
 :noCount, :question, :answer, :lastFactor, :spaceUntil,
 :type, :combinedDue, :rd, 0)""", dlist)
-        self.deck.db.statement(
+        self.deck.db.execute(
             "delete from cardsDeleted where cardId in %s" %
             ids2str([c[0] for c in cards]))
 
@@ -569,7 +568,7 @@ values
         if 'meta' in deck:
             meta = deck['meta']
             for (k,v) in meta:
-                self.deck.db.statement("""
+                self.deck.db.execute("""
 insert or replace into deckVars
 (key, value) values (:k, :v)""", k=k, v=v)
             del deck['meta']
@@ -592,7 +591,7 @@ select * from revlog where time > :ls""",
                   'flags': h[8]} for h in history]
         if not dlist:
             return
-        self.deck.db.statements("""
+        self.deck.db.executemany("""
 insert or ignore into revlog values
 (:time, :cardId, :ease, :rep, :lastInterval, :interval, :factor,
  :userTime, :flags)""",
@@ -603,7 +602,7 @@ insert or ignore into revlog values
 
     def updateSources(self, sources):
         for s in sources:
-            self.deck.db.statement("""
+            self.deck.db.execute("""
 insert or replace into sources values
 (:id, :name, :created, :lastSync, :syncPeriod)""",
                                   id=s[0],
@@ -633,12 +632,12 @@ from media where id in %s""" % ids2str(ids))]
                 'description': m[5]})
         # apply metadata
         if meta:
-            self.deck.db.statements("""
+            self.deck.db.executemany("""
 insert or replace into media (id, filename, size, created,
 originalPath, description)
 values (:id, :filename, :size, :created, :originalPath,
 :description)""", meta)
-        self.deck.db.statement(
+        self.deck.db.execute(
             "delete from mediaDeleted where mediaId in %s" %
             ids2str([m[0] for m in media]))
 
@@ -646,7 +645,7 @@ values (:id, :filename, :size, :created, :originalPath,
         sids = ids2str(ids)
         files = self.deck.db.column0(
             "select filename from media where id in %s" % sids)
-        self.deck.db.statement("""
+        self.deck.db.execute("""
 insert into mediaDeleted
 select id, :now from media
 where media.id in %s""" % sids, now=time.time())
diff --git a/anki/utils.py b/anki/utils.py
index 269e211e1..f717ee328 100644
--- a/anki/utils.py
+++ b/anki/utils.py
@@ -11,7 +11,6 @@ except ImportError:
     import md5
     md5 = md5.new
 
-from anki.db import *
 from anki.lang import _, ngettext
 import locale, sys
 
@@ -151,7 +150,7 @@ def tidyHTML(html):
                   "margin-right:\d+px;(?: -qt-block-indent:0; "
                   "text-indent:0px;)?", u"", html)
     html = re.sub(u"-qt-paragraph-type:empty;", u"", html)
-    # strip leading space in style statements, and remove if no contents
+    # strip leading space in style attributes, and remove if no contents
     html = re.sub(u'style=" ', u'style="', html)
     html = re.sub(u' style=""', u"", html)
     # convert P tags into SPAN and/or BR
@@ -246,7 +245,7 @@ def canonifyTags(tags):
     tags = [t.lstrip(":") for t in set(parseTags(tags))]
     return joinTags(sorted(tags))
 
-def findTag(tag, tags):
+def hasTag(tag, tags):
     "True if TAG is in TAGS. Ignore case."
     return tag.lower() in [t.lower() for t in tags]
 
@@ -254,7 +253,7 @@ def addTags(addtags, tags):
     "Add tags if they don't exist."
     currentTags = parseTags(tags)
     for tag in parseTags(addtags):
-        if not findTag(tag, currentTags):
+        if not hasTag(tag, currentTags):
             currentTags.append(tag)
     return joinTags(currentTags)
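
The renamed helper keeps the old semantics, so callers only change the name. A small
sketch of the tag helpers together (values illustrative):

    tags = addTags(u"verb", u"italian vocab")   # appends "verb" only if not already present
    hasTag(u"VERB", parseTags(tags))            # True; the comparison ignores case
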
 
diff --git a/tests/deck/fake.png b/tests/deck/fake.png
deleted file mode 100644
index 8baef1b4a..000000000
--- a/tests/deck/fake.png
+++ /dev/null
@@ -1 +0,0 @@
-abc
diff --git a/tests/importing/dingsbums.xml b/tests/importing/dingsbums.xml
deleted file mode 100644
index a00bd17df..000000000
--- a/tests/importing/dingsbums.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-
-
-Deutsch - ItalienischRick Gruber-RiemerKlett Grund- und AufbauwortschatzDeutschItalienischAttributesLektionKategoriOthersErklärungUnregelmässigen_USen_USen_USen_USen_USen_USen_USen_USen_US1000222falsefalse
-
-RegelmässigUnregelmässig
-avereessereavere oder essere
--are (regelmässig)-ere (regelmässig)-ire (regelmässig)Unregelmässig
-illa
-RegelmässigUnregelmässigNur EinzahlNur Mehrzahl
-
-
-
-
-
-
-
-
-
-
-
-
-Rest
-Harenberg Kalender Italienisch 2007
-50. Restaurant, Café, Hotel
-Berlitz Kalender 2005
-A
-
-
-Default
-
-
-entfernen, beseitigenallontanare
-dann; damals, also; früherallora
-Schüler, Zöglingallievo
-lustig, heiterallegro
-sich in einer unbequemen Situation befindenessere un pesche four d' aqua
-das ist mir egalme ne fregoGeste: unter dem Kinn mit der Hand vonhinten nach vorne reiben
-Wirtinostessa
-
-
-2361915312
-1961915312
-1251251100
-2021915312
-1881251100
-1241811000
-1761915312
-1241811000
-
-
diff --git a/tests/importing/supermemo1.xml b/tests/importing/supermemo1.xml
deleted file mode 100644
index d65652107..000000000
--- a/tests/importing/supermemo1.xml
+++ /dev/null
@@ -1,89 +0,0 @@
-
-
-  3572
-
-  
-    1
-
-    Topic
-
-    
-      
-
-      
-    
-
-    
-      40326
-
-      aoeu
-
-      Topic
-
-      
-        40327
-
-        1-400
-
-        Topic
-
-        
-          40615
-
-          aoeu
-
-          Topic
-
-          
-            10247
-
-            Item
-
-            
-              aoeu
-
-              aoeu
-            
-
-            
-              1844
-
-              7
-
-              0
-
-              19.09.2002
-
-              5,701
-
-              2,452
-            
-          
-
-        
-
-        Topic
-
-        
-          aoeu
-          
-        
-
-        
-          0
-
-          0
-
-          0
-
-          04.08.2000
-
-          3,000
-
-          0,000
-        
-
-      
-    
-  
-
diff --git a/tests/importing/test.mem b/tests/importing/test.mem
deleted file mode 100644
index 71668e56b..000000000
--- a/tests/importing/test.mem
+++ /dev/null
@@ -1,219 +0,0 @@
---- Mnemosyne Data Base --- Format Version 1 ---
-(lp1
-(imnemosyne.core.mnemosyne_core
-StartTime
-p2
-(dp3
-S'time'
-p4
-F1183141800
-sba(lp5
-(imnemosyne.core.mnemosyne_core
-Category
-p6
-(dp7
-S'active'
-p8
-I01
-sS'name'
-p9
-V
-p10
-sba(imnemosyne.core.mnemosyne_core
-Category
-p11
-(dp12
-S'active'
-p13
-I01
-sS'name'
-p14
-Vfoo
-p15
-sba(imnemosyne.core.mnemosyne_core
-Category
-p16
-(dp17
-g13
-I01
-sg14
-Vbaz, quux
-p18
-sbaa(lp19
-(imnemosyne.core.mnemosyne_core
-Item
-p20
-(dp21
-S'a'
-Vbar
-p22
-sS'last_rep'
-p23
-L34L
-sS'ret_reps'
-p24
-I0
-sS'cat'
-p25
-g16
-sS'q'
-Vfoo
-p26
-sS'grade'
-p27
-I0
-sS'acq_reps'
-p28
-I1
-sS'ret_reps_since_lapse'
-p29
-I0
-sS'easiness'
-p30
-F2.5
-sS'lapses'
-p31
-I0
-sS'acq_reps_since_lapse'
-p32
-I1
-sS'next_rep'
-p33
-L34L
-sS'id'
-p34
-S'9f401476'
-p35
-sba(imnemosyne.core.mnemosyne_core
-Item
-p36
-(dp37
-S'a'
-Vfoo
-p38
-sg23
-L34L
-sg24
-I0
-sg25
-g6
-sS'q'
-Vbar
-p39
-sg27
-I0
-sg28
-I1
-sg29
-I0
-sg30
-F2.5
-sg31
-I0
-sg32
-I1
-sg33
-L34L
-sg34
-S'a869958d'
-p40
-sba(imnemosyne.core.mnemosyne_core
-Item
-p41
-(dp42
-S'a'
-Vquux
-p43
-sg23
-L34L
-sg24
-I0
-sg25
-g11
-sS'q'
-Vbaz
-p44
-sg27
-I5
-sg28
-I2
-sg29
-I0
-sg30
-F2.5
-sg31
-I0
-sg32
-I2
-sg33
-L35L
-sg34
-S'74651aa3'
-p45
-sba(imnemosyne.core.mnemosyne_core
-Item
-p46
-(dp47
-S'a'
-Vbaz
-p48
-sg23
-L34L
-sg24
-I0
-sg25
-g6
-sS'q'
-Vquux
-p49
-sg27
-I0
-sg28
-I1
-sg29
-I0
-sg30
-F2.5
-sg31
-I0
-sg32
-I1
-sg33
-L34L
-sg34
-S'0bd8f10b'
-p50
-sba(imnemosyne.core.mnemosyne_core
-Item
-p51
-(dp52
-S'a'
-Vcard
-p53
-sg23
-L34L
-sg24
-I0
-sg25
-g11
-sS'q'
-Vonesided
-p54
-sg27
-I0
-sg28
-I1
-sg29
-I0
-sg30
-F2.5
-sg31
-I0
-sg32
-I1
-sg33
-L34L
-sg34
-S'bb503cf1'
-p55
-sbaa.
\ No newline at end of file
diff --git a/tests/importing/test10-2.anki b/tests/importing/test10-2.anki
deleted file mode 100644
index 9ba69ece0ab6ec22eccbd0c1f9d49ed118c45f2b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 180224
zcmeI54|E(?eeY*>NBhrOjsIq?IKrb9inNLB?5-r+N)WMnyY2nRFT8Oig!
z+L3p5BgsAR-a2Xi(Lhru=g`mt0eVsbw1plDv=rz$Bx!-9IDO$y^3F5w@i+|xH_dqg
z`rzgD{qCKe)y)2r0}eROcP#JD-20o~{oT*Kb7$_IyZ3kaEeDLEuFRT+d9|nv3Tp*X
z6!s{JAP6G;AE5uv_6Y4rf-s>}0$BWNn<#AipIh2U$|wJf$zCS^QvR9vC##)?c!z*=
z_YG1o*4-_>=fCrbI$SCoZh`7=p2^*R+J~5rS
zF*Bu1Oin8kM-ChqP$FTopc#2J*Dx+V&r%rI;tgZ->b$N@XO5CCmp)CmvIS$IXqfr>
zNHtqDZr7DJPfi}lj81sYJ6WlEbqdvH5jj@`%gISys|iu-LgFV&y2Wf8&yVJl7-Zx)q&DVI|;eO4{yib`rgDK0PQ6|theMDevUeSNIN
z4o!_89G$vVxjA#I(r;+I6qS9GQ7c6%DHfk^8*;1jP=Zn1YaWxIRVq44k>;yFnu8W0LmctYrZ)I`(I5lVE`J!I9
zUCkXd=$dGeJ+H93)LMAnIDf@%+12#wsjfk0T*`HA5DBw;`IuSCw=jaF
zX6gQVK2nU)gt>UWqK8d0zq)KezkPKfhJV#;FilQnXbzq7dp-?wO>@<}eZFJT7mRgw

diff --git a/tests/importing/test10-3.anki b/tests/importing/test10-3.anki
deleted file mode 100644
index c7e770b365af49505479206eb611d8d59b3a98e9..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 180224

diff --git a/tests/importing/test10.anki b/tests/importing/test10.anki
deleted file mode 100644
index 8280631882ea2c3c6917fb511b64a69ad5152c93..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 180224
diff --git a/tests/test_deck.py b/tests/test_deck.py
--- a/tests/test_deck.py
+++ b/tests/test_deck.py
-    assert re.sub("", "", c0.question) == u"one"
-    # now let's make a duplicate
+    m = f.model
+    m.templates[1].active = True
+    m.flush()
+    n = deck.addFact(f)
+    assert n == 2
+    # check q/a generation
+    c0 = f.cards()[0]
+    assert re.sub("", "", c0.q) == u"one"
+    # it should not be a duplicate
+    for p in f.problems():
+        assert not p
+    # now let's make a duplicate and test uniqueness
     f2 = deck.newFact()
-    f2['Front'] = u"one"; f2['Back'] = u"three"
-    try:
-        f2 = deck.addFact(f2)
-    except Exception, e:
-        pass
-    assert e.data['type'] == 'fieldNotUnique'
+    f2.model.fields[1].conf['required'] = True
+    f2['Front'] = u"one"; f2['Back'] = u""
+    p = f2.problems()
+    assert p[0] == "unique"
+    assert p[1] == "required"
     # try delete the first card
-    id1 = f.cards[0].id; id2 = f.cards[1].id
+    cards = f.cards()
+    id1 = cards[0].id; id2 = cards[1].id
+    assert deck.cardCount() == 2
+    assert deck.factCount() == 1
     deck.deleteCard(id1)
+    assert deck.cardCount() == 1
+    assert deck.factCount() == 1
     # and the second should clear the fact
     deck.deleteCard(id2)
+    assert deck.cardCount() == 0
+    assert deck.factCount() == 0
 
 def test_fieldChecksum():
-    deck = getDeck()
-    deck.addModel(BasicModel())
+    deck = getEmptyDeck()
     f = deck.newFact()
     f['Front'] = u"new"; f['Back'] = u"new2"
     deck.addFact(f)
-    (id, sum) = deck.db.first(
-        "select id, chksum from fields where value = 'new'")
-    assert sum == "22af645d"
+    assert deck.db.scalar(
+        "select csum from fdata where ord = 0") == "22af645d"
     # empty field should have no checksum
     f['Front'] = u""
-    deck.db.flush()
+    f.flush()
     assert deck.db.scalar(
-        "select chksum from fields where id = :id", id=id) == ""
-    # changing the value should change the checksum
+        "select csum from fdata where ord = 0") == ""
+    # changing the val should change the checksum
     f['Front'] = u"newx"
-    deck.db.flush()
+    f.flush()
     assert deck.db.scalar(
-        "select chksum from fields where id = :id", id=id) == "4b0e5a4c"
+        "select csum from fdata where ord = 0") == "4b0e5a4c"
     # back should have no checksum, because it's not set to be unique
-    (id, sum) = deck.db.first(
-        "select id, chksum from fields where value = 'new2'")
-    assert sum == ""
+    assert deck.db.scalar(
+        "select csum from fdata where ord = 1") == ""
     # if we turn on unique, it should get a checksum
-    fm = f.model.fieldModels[1]
-    fm.unique = True
-    deck.updateFieldChecksums(fm.id)
+    f.model.fields[1].conf['unique'] = True
+    f.model.flush()
+    f.model.updateCache()
+    print deck.db.scalar(
+        "select csum from fdata where ord = 1")
     assert deck.db.scalar(
-        "select chksum from fields where id = :id", id=id) == "82f2ec5f"
-    # and turning it off should zero the checksum again
-    fm.unique = False
-    deck.updateFieldChecksums(fm.id)
-    assert deck.db.scalar(
-        "select chksum from fields where id = :id", id=id) == ""
-
-def test_modelAddDelete():
-    deck = getDeck()
-    deck.addModel(BasicModel())
-    deck.addModel(BasicModel())
-    f = deck.newFact()
-    f['Front'] = u'1'
-    f['Back'] = u'2'
-    deck.addFact(f)
-    assert deck.cardCount() == 1
-    deck.deleteModel(deck.currentModel)
-    deck.reset()
-    assert deck.cardCount() == 0
-    deck.db.refresh(deck)
-
-def test_modelCopy():
-    deck = getDeck()
-    m = BasicModel()
-    assert len(m.fieldModels) == 2
-    assert len(m.cardModels) == 2
-    deck.addModel(m)
-    f = deck.newFact()
-    f['Front'] = u'1'
-    deck.addFact(f)
-    m2 = deck.copyModel(m)
-    assert m2.name == "Basic copy"
-    assert m2.id != m.id
-    assert m2.fieldModels[0].id != m.fieldModels[0].id
-    assert m2.cardModels[0].id != m.cardModels[0].id
-    assert len(m2.fieldModels) == 2
-    assert len(m.fieldModels) == 2
-    assert len(m2.fieldModels) == len(m.fieldModels)
-    assert len(m.cardModels) == 2
-    assert len(m2.cardModels) == 2
-
-def test_media():
-    deck = getDeck()
-    # create a media dir
-    deck.mediaDir(create=True)
-    # put a file into it
-    file = unicode(os.path.join(testDir, "deck/fake.png"))
-    deck.addMedia(file)
-    # make sure it gets copied on saveas
-    path = "/tmp/saveAs2.anki"
-    sum = "fake.png"
-    try:
-        os.unlink(path)
-    except OSError:
-        pass
-    deck.saveAs(path)
-    assert os.path.exists("/tmp/saveAs2.media/%s" % sum)
-
-def test_modelChange():
-    deck = getDeck()
-    m = Model(u"Japanese")
-    m1 = m
-    f = FieldModel(u'Expression', True, True)
-    m.addFieldModel(f)
-    m.addFieldModel(FieldModel(u'Meaning', False, False))
-    f = FieldModel(u'Reading', False, False)
-    m.addFieldModel(f)
-    m.addCardModel(CardModel(u"Recognition",
-                             u"%(Expression)s",
-                             u"%(Reading)s<br>%(Meaning)s"))
-    m.addCardModel(CardModel(u"Recall",
-                             u"%(Meaning)s",
-                             u"%(Expression)s<br>%(Reading)s",
-                             active=False))
-    m.tags = u"Japanese"
-    m1.cardModels[1].active = True
-    deck.addModel(m1)
-    f = deck.newFact()
-    f['Expression'] = u'e'
-    f['Meaning'] = u'm'
-    f['Reading'] = u'r'
-    f = deck.addFact(f)
-    f2 = deck.newFact()
-    f2['Expression'] = u'e2'
-    f2['Meaning'] = u'm2'
-    f2['Reading'] = u'r2'
-    deck.addFact(f2)
-    m2 = BasicModel()
-    m2.cardModels[1].active = True
-    deck.addModel(m2)
-    # convert to basic
-    assert deck.modelUseCount(m1) == 2
-    assert deck.modelUseCount(m2) == 0
-    assert deck.cardCount() == 4
-    assert deck.factCount() == 2
-    fmap = {m1.fieldModels[0]: m2.fieldModels[0],
-            m1.fieldModels[1]: None,
-            m1.fieldModels[2]: m2.fieldModels[1]}
-    cmap = {m1.cardModels[0]: m2.cardModels[0],
-            m1.cardModels[1]: None}
-    deck.changeModel([f.id], m2, fmap, cmap)
-    deck.reset()
-    assert deck.modelUseCount(m1) == 1
-    assert deck.modelUseCount(m2) == 1
-    assert deck.cardCount() == 3
-    assert deck.factCount() == 2
-    (q, a) = deck.db.first("""
-select question, answer from cards where factId = :id""",
-                           id=f.id)
-    assert stripHTML(q) == u"e"
-    assert stripHTML(a) == u"r"
-
-def test_findCards():
-    deck = getDeck()
-    deck.addModel(BasicModel())
-    f = deck.newFact()
-    f['Front'] = u'dog'
-    f['Back'] = u'cat'
-    f.addTags(u"monkey")
-    deck.addFact(f)
-    f = deck.newFact()
-    f['Front'] = u'goats are fun'
-    f['Back'] = u'sheep'
-    f.addTags(u"sheep goat horse")
-    deck.addFact(f)
-    f = deck.newFact()
-    f['Front'] = u'cat'
-    f['Back'] = u'sheep'
-    deck.addFact(f)
-    assert not deck.findCards("tag:donkey")
-    assert len(deck.findCards("tag:sheep")) == 1
-    assert len(deck.findCards("tag:sheep tag:goat")) == 1
-    assert len(deck.findCards("tag:sheep tag:monkey")) == 0
-    assert len(deck.findCards("tag:monkey")) == 1
-    assert len(deck.findCards("tag:sheep -tag:monkey")) == 1
-    assert len(deck.findCards("-tag:sheep")) == 2
-    assert len(deck.findCards("cat")) == 2
-    assert len(deck.findCards("cat -dog")) == 1
-    assert len(deck.findCards("cat -dog")) == 1
-    assert len(deck.findCards("are goats")) == 1
-    assert len(deck.findCards('"are goats"')) == 0
-    assert len(deck.findCards('"goats are"')) == 1
-    deck.addTags(deck.db.column0("select id from cards"), "foo bar")
-    assert (len(deck.findCards("tag:foo")) ==
-            len(deck.findCards("tag:bar")) ==
-            3)
-    deck.deleteTags(deck.db.column0("select id from cards"), "foo")
-    assert len(deck.findCards("tag:foo")) == 0
-    assert len(deck.findCards("tag:bar")) == 3
+        "select csum from fdata where ord = 1") == "82f2ec5f"
+    # turning it off doesn't currently zero the checksum for efficiency reasons
+    # f.model.fields[1].conf['unique'] = False
+    # f.model.flush()
+    # f.model.updateCache()
+    # assert deck.db.scalar(
+    #     "select csum from fdata where ord = 1") == ""
 
 def test_upgrade():
-    src = os.path.expanduser("~/Scratch/upgrade.anki")
+    import tempfile, shutil
+    src = os.path.join(testDir, "support", "anki12.anki")
     (fd, dst) = tempfile.mkstemp(suffix=".anki")
     print "upgrade to", dst
     shutil.copy(src, dst)
diff --git a/tests/test_media.py b/tests/test_media.py
index 96c0feeaf..485287f00 100644
--- a/tests/test_media.py
+++ b/tests/test_media.py
@@ -1,55 +1,45 @@
 # coding: utf-8
 
 import tempfile, os, time
-import anki.media as m
 from anki import Deck
-from anki.stdmodels import BasicModel
 from anki.utils import checksum
-
-def getDeck():
-    import tempfile
-    (fd, nam) = tempfile.mkstemp(suffix=".anki")
-    os.unlink(nam)
-    return Deck(nam)
+from shared import getEmptyDeck, testDir
 
 # uniqueness check
 def test_unique():
+    d = getEmptyDeck()
     dir = tempfile.mkdtemp(prefix="anki")
     # new file
     n = "foo.jpg"
-    new = os.path.basename(m.uniquePath(dir, n))
+    new = os.path.basename(d.media.uniquePath(dir, n))
     assert new == n
     # duplicate file
     open(os.path.join(dir, n), "w").write("hello")
     n = "foo.jpg"
-    new = os.path.basename(m.uniquePath(dir, n))
+    new = os.path.basename(d.media.uniquePath(dir, n))
     assert new == "foo (1).jpg"
     # another duplicate
     open(os.path.join(dir, "foo (1).jpg"), "w").write("hello")
     n = "foo.jpg"
-    new = os.path.basename(m.uniquePath(dir, n))
+    new = os.path.basename(d.media.uniquePath(dir, n))
     assert new == "foo (2).jpg"
 
 # copying files to media folder
 def test_copy():
-    deck = getDeck()
+    d = getEmptyDeck()
     dir = tempfile.mkdtemp(prefix="anki")
     path = os.path.join(dir, "foo.jpg")
     open(path, "w").write("hello")
     # new file
-    assert m.copyToMedia(deck, path) == "foo.jpg"
+    assert d.media.addFile(path) == "foo.jpg"
     # dupe md5
-    deck.db.statement("""
-insert into media values (null, 'foo.jpg', 0, 0, :sum)""",
-                      sum=checksum("hello"))
     path = os.path.join(dir, "bar.jpg")
     open(path, "w").write("hello")
-    assert m.copyToMedia(deck, path) == "foo.jpg"
+    assert d.media.addFile(path) == "foo.jpg"
 
 # media db
 def test_db():
-    deck = getDeck()
-    deck.addModel(BasicModel())
+    deck = getEmptyDeck()
     dir = tempfile.mkdtemp(prefix="anki")
     path = os.path.join(dir, "foo.jpg")
     open(path, "w").write("hello")
@@ -58,55 +48,42 @@ def test_db():
     f['Front'] = u""
     f['Back'] = u"back [sound:foo.jpg]"
     deck.addFact(f)
-    # 1 entry in the media db, with two references, and missing file
+    # 1 entry in the media db, and no checksum
     assert deck.db.scalar("select count() from media") == 1
-    assert deck.db.scalar("select refcnt from media") == 2
-    assert not deck.db.scalar("select group_concat(chksum, '') from media")
-    # copy to media folder & check db
-    path = m.copyToMedia(deck, path)
-    m.rebuildMediaDir(deck)
+    assert not deck.db.scalar("select group_concat(csum, '') from media")
+    # copy to media folder
+    path = deck.media.addFile(path)
     # md5 should be set now
     assert deck.db.scalar("select count() from media") == 1
-    assert deck.db.scalar("select refcnt from media") == 2
-    assert deck.db.scalar("select group_concat(chksum, '') from media")
-    # edit the fact to remove a reference
-    f['Back'] = u""
-    f.setModified(True, deck)
-    deck.db.flush()
-    assert deck.db.scalar("select count() from media") == 1
-    assert deck.db.scalar("select refcnt from media") == 1
-    # remove the front reference too
-    f['Front'] = u""
-    f.setModified(True, deck)
-    assert deck.db.scalar("select refcnt from media") == 0
-    # add the reference back
-    f['Front'] = u""
-    f.setModified(True, deck)
-    assert deck.db.scalar("select refcnt from media") == 1
+    assert deck.db.scalar("select group_concat(csum, '') from media")
     # detect file modifications
-    oldsum = deck.db.scalar("select chksum from media")
+    oldsum = deck.db.scalar("select csum from media")
     open(path, "w").write("world")
-    m.rebuildMediaDir(deck)
-    newsum = deck.db.scalar("select chksum from media")
+    deck.media.rebuildMediaDir()
+    newsum = deck.db.scalar("select csum from media")
     assert newsum and newsum != oldsum
     # delete underlying file and check db
     os.unlink(path)
-    m.rebuildMediaDir(deck)
+    deck.media.rebuildMediaDir()
     # md5 should be gone again
     assert deck.db.scalar("select count() from media") == 1
-    assert deck.db.scalar("select not chksum from media")
+    assert deck.db.scalar("select not csum from media")
     # media db should pick up media defined via templates & bulk update
     f['Back'] = u"bar.jpg"
-    f.setModified(True, deck)
-    deck.db.flush()
+    f.flush()
     # modify template & regenerate
     assert deck.db.scalar("select count() from media") == 1
-    assert deck.db.scalar("select sum(refcnt) from media") == 1
-    deck.currentModel.cardModels[0].aformat=u''
-    deck.updateCardsFromModel(deck.currentModel)
-    assert deck.db.scalar("select sum(refcnt) from media") == 2
+    m = deck.currentModel()
+    m.templates[0].afmt=u''
+    m.flush()
+    m.updateCache()
     assert deck.db.scalar("select count() from media") == 2
-    deck.currentModel.cardModels[0].aformat=u'{{{Back}}}'
-    deck.updateCardsFromModel(deck.currentModel)
-    assert deck.db.scalar("select count() from media") == 2
-    assert deck.db.scalar("select sum(refcnt) from media") == 1
+
+def test_deckIntegration():
+    deck = getEmptyDeck()
+    # create a media dir
+    deck.media.mediaDir(create=True)
+    # put a file into it
+    file = unicode(os.path.join(testDir, "deck/fake.png"))
+    deck.media.addFile(file)
+    print "todo: check media copied on rename"
diff --git a/tests/test_sched.py b/tests/test_sched.py
index 11f8ce399..3c9de0ff5 100644
--- a/tests/test_sched.py
+++ b/tests/test_sched.py
@@ -1,19 +1,13 @@
 # coding: utf-8
 
 import time
-from tests.shared import assertException, getDeck
+from tests.shared import assertException, getEmptyDeck
 from anki.stdmodels import BasicModel
 #from anki.db import *
 
-def getEmptyDeck():
-    d = getDeck()
-    d.addModel(BasicModel())
-    d.db.commit()
-    return d
-
 def test_basics():
     d = getEmptyDeck()
-    assert not d.getCard()
+    assert not d.sched.getCard()
 
 def test_new():
     d = getEmptyDeck()
@@ -22,16 +16,15 @@ def test_new():
     f = d.newFact()
     f['Front'] = u"one"; f['Back'] = u"two"
     f = d.addFact(f)
-    d.db.flush()
     d.reset()
     assert d.sched.newCount == 1
     # fetch it
-    c = d.getCard()
+    c = d.sched.getCard()
     assert c
     assert c.queue == 2
     assert c.type == 2
     # if we answer it, it should become a learn card
-    d.answerCard(c, 1)
+    d.sched.answerCard(c, 1)
     assert c.queue == 0
     assert c.type == 2
 
@@ -41,31 +34,30 @@ def test_learn():
     f = d.newFact()
     f['Front'] = u"one"; f['Back'] = u"two"
     f = d.addFact(f)
-    d.db.flush()
     # set as a learn card and rebuild queues
-    d.db.statement("update cards set queue=0, type=2")
+    d.db.execute("update cards set queue=0, type=2")
     d.reset()
-    # getCard should return it, since it's due in the past
-    c = d.getCard()
+    # sched.getCard should return it, since it's due in the past
+    c = d.sched.getCard()
     assert c
     # it should have no cycles and a grade of 0
     assert c.grade == c.cycles == 0
     # fail it
-    d.answerCard(c, 1)
+    d.sched.answerCard(c, 1)
     # it should by due in 30 seconds
     assert round(c.due - time.time()) == 30
     # and have 1 cycle, but still a zero grade
    assert c.grade == 0
     assert c.cycles == 1
     # pass it once
-    d.answerCard(c, 2)
+    d.sched.answerCard(c, 2)
     # it should by due in 3 minutes
     assert round(c.due - time.time()) == 180
     # and it should be grade 1 now
     assert c.grade == 1
     assert c.cycles == 2
     # pass again
-    d.answerCard(c, 2)
+    d.sched.answerCard(c, 2)
     # it should by due in 10 minutes
     assert round(c.due - time.time()) == 600
     # and it should be grade 1 now
@@ -74,7 +66,7 @@ def test_learn():
     # the next pass should graduate the card
     assert c.queue == 0
     assert c.type == 2
-    d.answerCard(c, 2)
+    d.sched.answerCard(c, 2)
     assert c.queue == 1
     assert c.type == 1
     print "test intervals, check early removal, etc"