mirror of
https://github.com/ankitects/anki.git
synced 2025-12-12 22:36:55 -05:00
drop sqlalchemy; massive refactor
SQLAlchemy is a great tool, but it wasn't a great fit for Anki: - We often had to drop down to raw SQL for performance reasons. - The DB cursors and results were wrapped, which incurred a sizable performance hit due to introspection. Operations like fetching 50k records from a hot cache were taking more than twice as long to complete. - We take advantage of sqlite-specific features, so SQL language abstraction is useless to us. - The anki schema is quite small, so manually saving and loading objects is not a big burden. In the process of porting to DBAPI, I've refactored the database schema: - App configuration data that we don't need in joins or bulk updates has been moved into JSON objects. This simplifies serializing, and means we won't need DB schema changes to store extra options in the future. This change obsoletes the deckVars table. - Renamed tables: -- fieldModels -> fields -- cardModels -> templates -- fields -> fdata - a number of attribute names have been shortened Classes like Card, Fact & Model remain. They maintain a reference to the deck. To write their state to the DB, call .flush(). Objects no longer have their modification time manually updated. Instead, the modification time is updated when they are flushed. This also applies to the deck. Decks will now save on close, because various operations that were done at deck load will be moved into deck close instead. Operations like undoing buried card are cheap on a hot cache, but expensive on startup. Programmatically you can call .close(save=False) to avoid a save and a modification bump. This will be useful for generating due counts. Because of the new saving behaviour, the save and save as options will be removed from the GUI in the future. The q/a cache and field cache generating has been centralized. Facts will automatically rebuild the cache on flush; models can do so with model.updateCache(). Media handling has also been reworked. It has moved into a MediaRegistry object, which the deck holds. Refcounting has been dropped - it meant we had to compare old and new value every time facts or models were changed, and existed for the sole purpose of not showing errors on a missing media download. Instead we just media.registerText(q+a) when it's updated. The download function will be expanded to ask the user if they want to continue after a certain number of files have failed to download, which should be an adequate alternative. And we now add the file into the media DB when it's copied to th emedia directory, not when the card is commited. This fixes duplicates a user would get if they added the same media to a card twice without adding the card. The old DeckStorage object had its upgrade code split in a previous commit; the opening and upgrading code has been merged back together, and put in a separate storage.py file. The correct way to open a deck now is import anki; d = anki.Deck(path). deck.getCard() -> deck.sched.getCard() same with answerCard deck.getCard(id) returns a Card object now. And the DB wrapper has had a few changes: - sql statements are a more standard DBAPI: - statement() -> execute() - statements() -> executemany() - called like execute(sql, 1, 2, 3) or execute(sql, a=1, b=2, c=3) - column0 -> list
This commit is contained in:
parent
8e40fdcb18
commit
2f27133705
44 changed files with 1689 additions and 2837 deletions
|
|
@ -3,58 +3,35 @@
|
||||||
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
||||||
|
|
||||||
"""\
|
"""\
|
||||||
Anki (libanki)
|
|
||||||
====================
|
|
||||||
|
|
||||||
Open a deck:
|
Open a deck:
|
||||||
|
|
||||||
deck = anki.Deck(path)
|
deck = anki.Deck(path)
|
||||||
|
|
||||||
Get a card:
|
Get a due card:
|
||||||
|
|
||||||
card = deck.getCard()
|
card = deck.sched.getCard()
|
||||||
if not card:
|
if not card:
|
||||||
# deck is finished
|
# deck is finished
|
||||||
|
|
||||||
Show the card:
|
Show the card:
|
||||||
|
|
||||||
print card.question, card.answer
|
print card.q, card.a
|
||||||
|
|
||||||
Answer the card:
|
Answer the card:
|
||||||
|
|
||||||
deck.answerCard(card, ease)
|
deck.sched.answerCard(card, ease)
|
||||||
|
|
||||||
Edit the card:
|
Edit the card:
|
||||||
|
|
||||||
fields = card.fact.model.fieldModels
|
fact = card.fact()
|
||||||
for field in fields:
|
for (name, value) in fact.items():
|
||||||
card.fact[field.name] = 'newvalue'
|
fact[name] = value + " new"
|
||||||
card.fact.setModified(textChanged=True, deck=deck)
|
fact.flush()
|
||||||
deck.setModified()
|
|
||||||
|
|
||||||
Get all cards via ORM (slow):
|
|
||||||
|
|
||||||
from anki.cards import Card
|
|
||||||
cards = deck.s.query(Card).all()
|
|
||||||
|
|
||||||
Get all q/a/ids via SQL (fast):
|
|
||||||
|
|
||||||
cards = deck.s.all("select id, question, answer from cards")
|
|
||||||
|
|
||||||
Save & close:
|
Save & close:
|
||||||
|
|
||||||
deck.save()
|
|
||||||
deck.close()
|
deck.close()
|
||||||
"""
|
"""
|
||||||
|
|
||||||
try:
|
version = "1.2.6"
|
||||||
__import__('pkg_resources').declare_namespace(__name__)
|
from anki.storage import Deck
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
version = "1.2.8"
|
|
||||||
|
|
||||||
from anki.deck import DeckStorage
|
|
||||||
|
|
||||||
def Deck(*args, **kwargs):
|
|
||||||
return DeckStorage.Deck(*args, **kwargs)
|
|
||||||
|
|
|
||||||
223
anki/cards.py
223
anki/cards.py
|
|
@ -2,12 +2,8 @@
|
||||||
# Copyright: Damien Elmes <anki@ichi2.net>
|
# Copyright: Damien Elmes <anki@ichi2.net>
|
||||||
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
||||||
|
|
||||||
import time, sys, math, random
|
import time
|
||||||
from anki.db import *
|
from anki.utils import genID, intTime, hexifyID
|
||||||
from anki.models import CardModel, Model, FieldModel, formatQA
|
|
||||||
from anki.facts import Fact, factsTable, Field
|
|
||||||
from anki.utils import parseTags, findTag, stripHTML, genID, hexifyID, intTime
|
|
||||||
from anki.media import updateMediaCount, mediaFiles
|
|
||||||
|
|
||||||
MAX_TIMER = 60
|
MAX_TIMER = 60
|
||||||
|
|
||||||
|
|
@ -26,57 +22,89 @@ MAX_TIMER = 60
|
||||||
# - rev queue: integer day
|
# - rev queue: integer day
|
||||||
# - lrn queue: integer timestamp
|
# - lrn queue: integer timestamp
|
||||||
|
|
||||||
cardsTable = Table(
|
|
||||||
'cards', metadata,
|
|
||||||
Column('id', Integer, primary_key=True),
|
|
||||||
Column('factId', Integer, ForeignKey("facts.id"), nullable=False),
|
|
||||||
Column('groupId', Integer, nullable=False, default=1),
|
|
||||||
Column('cardModelId', Integer, ForeignKey("cardModels.id"), nullable=False),
|
|
||||||
Column('modified', Integer, nullable=False, default=intTime),
|
|
||||||
# general
|
|
||||||
Column('question', UnicodeText, nullable=False, default=u""),
|
|
||||||
Column('answer', UnicodeText, nullable=False, default=u""),
|
|
||||||
Column('ordinal', Integer, nullable=False),
|
|
||||||
Column('flags', Integer, nullable=False, default=0),
|
|
||||||
# shared scheduling
|
|
||||||
Column('type', Integer, nullable=False, default=2),
|
|
||||||
Column('queue', Integer, nullable=False, default=2),
|
|
||||||
Column('due', Integer, nullable=False),
|
|
||||||
# sm2
|
|
||||||
Column('interval', Integer, nullable=False, default=0),
|
|
||||||
Column('factor', Integer, nullable=False),
|
|
||||||
Column('reps', Integer, nullable=False, default=0),
|
|
||||||
Column('streak', Integer, nullable=False, default=0),
|
|
||||||
Column('lapses', Integer, nullable=False, default=0),
|
|
||||||
# learn
|
|
||||||
Column('grade', Integer, nullable=False, default=0),
|
|
||||||
Column('cycles', Integer, nullable=False, default=0)
|
|
||||||
)
|
|
||||||
|
|
||||||
class Card(object):
|
class Card(object):
|
||||||
|
|
||||||
# called one of three ways:
|
def __init__(self, deck, id=None):
|
||||||
# - with no args, followed by .fromDB()
|
self.deck = deck
|
||||||
# - with all args, when adding cards to db
|
if id:
|
||||||
def __init__(self, fact=None, cardModel=None, group=None):
|
self.id = id
|
||||||
# timer
|
self.load()
|
||||||
self.timerStarted = None
|
else:
|
||||||
if fact:
|
# to flush, set fid, tid, due and ord
|
||||||
self.id = genID()
|
self.id = genID()
|
||||||
self.modified = intTime()
|
self.gid = 1
|
||||||
self.due = fact.pos
|
self.q = ""
|
||||||
self.fact = fact
|
self.a = ""
|
||||||
self.modelId = fact.modelId
|
self.flags = 0
|
||||||
self.cardModel = cardModel
|
self.type = 2
|
||||||
self.groupId = group.id
|
self.queue = 2
|
||||||
# placeholder; will get set properly when card graduates
|
self.interval = 0
|
||||||
self.factor = 2500
|
self.factor = 0
|
||||||
# for non-orm use
|
self.reps = 0
|
||||||
self.cardModelId = cardModel.id
|
self.streak = 0
|
||||||
self.ordinal = cardModel.ordinal
|
self.lapses = 0
|
||||||
|
self.grade = 0
|
||||||
|
self.cycles = 0
|
||||||
|
self.timerStarted = None
|
||||||
|
|
||||||
def setModified(self):
|
def load(self):
|
||||||
self.modified = intTime()
|
(self.id,
|
||||||
|
self.fid,
|
||||||
|
self.tid,
|
||||||
|
self.gid,
|
||||||
|
self.mod,
|
||||||
|
self.q,
|
||||||
|
self.a,
|
||||||
|
self.ord,
|
||||||
|
self.type,
|
||||||
|
self.queue,
|
||||||
|
self.due,
|
||||||
|
self.interval,
|
||||||
|
self.factor,
|
||||||
|
self.reps,
|
||||||
|
self.streak,
|
||||||
|
self.lapses,
|
||||||
|
self.grade,
|
||||||
|
self.cycles) = self.deck.db.first(
|
||||||
|
"select * from cards where id = ?", self.id)
|
||||||
|
|
||||||
|
def flush(self):
|
||||||
|
self.mod = intTime()
|
||||||
|
self.deck.db.execute(
|
||||||
|
"""
|
||||||
|
insert or replace into cards values
|
||||||
|
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||||
|
self.id,
|
||||||
|
self.fid,
|
||||||
|
self.tid,
|
||||||
|
self.gid,
|
||||||
|
self.mod,
|
||||||
|
self.q,
|
||||||
|
self.a,
|
||||||
|
self.ord,
|
||||||
|
self.type,
|
||||||
|
self.queue,
|
||||||
|
self.due,
|
||||||
|
self.interval,
|
||||||
|
self.factor,
|
||||||
|
self.reps,
|
||||||
|
self.streak,
|
||||||
|
self.lapses,
|
||||||
|
self.grade,
|
||||||
|
self.cycles)
|
||||||
|
|
||||||
|
def flushSched(self):
|
||||||
|
self.mod = intTime()
|
||||||
|
self.deck.db.execute(
|
||||||
|
"""update cards set
|
||||||
|
mod=?, type=?, queue=?, due=?, interval=?, factor=?, reps=?,
|
||||||
|
streak=?, lapses=?, grade=?, cycles=? where id = ?""",
|
||||||
|
self.mod, self.type, self.queue, self.due, self.interval,
|
||||||
|
self.factor, self.reps, self.streak, self.lapses,
|
||||||
|
self.grade, self.cycles, self.id)
|
||||||
|
|
||||||
|
def fact(self):
|
||||||
|
return self.deck.getFact(self.deck, self.fid)
|
||||||
|
|
||||||
def startTimer(self):
|
def startTimer(self):
|
||||||
self.timerStarted = time.time()
|
self.timerStarted = time.time()
|
||||||
|
|
@ -87,40 +115,9 @@ class Card(object):
|
||||||
# Questions and answers
|
# Questions and answers
|
||||||
##########################################################################
|
##########################################################################
|
||||||
|
|
||||||
def rebuildQA(self, deck, media=True):
|
|
||||||
# format qa
|
|
||||||
d = {}
|
|
||||||
for f in self.fact.model.fieldModels:
|
|
||||||
d[f.name] = (f.id, self.fact[f.name])
|
|
||||||
qa = formatQA(None, self.fact.modelId, d, self._splitTags(),
|
|
||||||
self.cardModel, deck)
|
|
||||||
# find old media references
|
|
||||||
files = {}
|
|
||||||
for type in ("question", "answer"):
|
|
||||||
for f in mediaFiles(getattr(self, type) or ""):
|
|
||||||
if f in files:
|
|
||||||
files[f] -= 1
|
|
||||||
else:
|
|
||||||
files[f] = -1
|
|
||||||
# update q/a
|
|
||||||
self.question = qa['question']
|
|
||||||
self.answer = qa['answer']
|
|
||||||
# determine media delta
|
|
||||||
for type in ("question", "answer"):
|
|
||||||
for f in mediaFiles(getattr(self, type)):
|
|
||||||
if f in files:
|
|
||||||
files[f] += 1
|
|
||||||
else:
|
|
||||||
files[f] = 1
|
|
||||||
# update media counts if we're attached to deck
|
|
||||||
if media:
|
|
||||||
for (f, cnt) in files.items():
|
|
||||||
updateMediaCount(deck, f, cnt)
|
|
||||||
self.setModified()
|
|
||||||
|
|
||||||
def htmlQuestion(self, type="question", align=True):
|
def htmlQuestion(self, type="question", align=True):
|
||||||
div = '''<div class="card%s" id="cm%s%s">%s</div>''' % (
|
div = '''<div class="card%s" id="cm%s%s">%s</div>''' % (
|
||||||
type[0], type[0], hexifyID(self.cardModelId),
|
type[0], type[0], hexifyID(self.tid),
|
||||||
getattr(self, type))
|
getattr(self, type))
|
||||||
# add outer div & alignment (with tables due to qt's html handling)
|
# add outer div & alignment (with tables due to qt's html handling)
|
||||||
if not align:
|
if not align:
|
||||||
|
|
@ -137,59 +134,3 @@ class Card(object):
|
||||||
|
|
||||||
def htmlAnswer(self, align=True):
|
def htmlAnswer(self, align=True):
|
||||||
return self.htmlQuestion(type="answer", align=align)
|
return self.htmlQuestion(type="answer", align=align)
|
||||||
|
|
||||||
def _splitTags(self):
|
|
||||||
return (self.fact._tags, self.fact.model.name, self.cardModel.name)
|
|
||||||
|
|
||||||
# Non-ORM
|
|
||||||
##########################################################################
|
|
||||||
|
|
||||||
def fromDB(self, s, id):
|
|
||||||
r = s.first("""select * from cards where id = :id""", id=id)
|
|
||||||
if not r:
|
|
||||||
return
|
|
||||||
(self.id,
|
|
||||||
self.factId,
|
|
||||||
self.groupId,
|
|
||||||
self.cardModelId,
|
|
||||||
self.modified,
|
|
||||||
self.question,
|
|
||||||
self.answer,
|
|
||||||
self.ordinal,
|
|
||||||
self.flags,
|
|
||||||
self.type,
|
|
||||||
self.queue,
|
|
||||||
self.due,
|
|
||||||
self.interval,
|
|
||||||
self.factor,
|
|
||||||
self.reps,
|
|
||||||
self.streak,
|
|
||||||
self.lapses,
|
|
||||||
self.grade,
|
|
||||||
self.cycles) = r
|
|
||||||
return True
|
|
||||||
|
|
||||||
def toDB(self, s):
|
|
||||||
# this shouldn't be used for schema changes
|
|
||||||
s.execute("""update cards set
|
|
||||||
modified=:modified,
|
|
||||||
question=:question,
|
|
||||||
answer=:answer,
|
|
||||||
flags=:flags,
|
|
||||||
type=:type,
|
|
||||||
queue=:queue,
|
|
||||||
due=:due,
|
|
||||||
interval=:interval,
|
|
||||||
factor=:factor,
|
|
||||||
reps=:reps,
|
|
||||||
streak=:streak,
|
|
||||||
lapses=:lapses,
|
|
||||||
grade=:grade,
|
|
||||||
cycles=:cycles
|
|
||||||
where id=:id""", self.__dict__)
|
|
||||||
|
|
||||||
mapper(Card, cardsTable, properties={
|
|
||||||
'cardModel': relation(CardModel),
|
|
||||||
'fact': relation(Fact, backref="cards", primaryjoin=
|
|
||||||
cardsTable.c.factId == factsTable.c.id),
|
|
||||||
})
|
|
||||||
|
|
|
||||||
|
|
@ -14,10 +14,52 @@ NEW_CARDS_RANDOM = 0
|
||||||
NEW_CARDS_DUE = 1
|
NEW_CARDS_DUE = 1
|
||||||
|
|
||||||
# sort order for day's new cards
|
# sort order for day's new cards
|
||||||
NEW_TODAY_ORDINAL = 0
|
NEW_TODAY_ORD = 0
|
||||||
NEW_TODAY_DUE = 1
|
NEW_TODAY_DUE = 1
|
||||||
|
|
||||||
# review card sort order
|
# review card sort order
|
||||||
REV_CARDS_OLD_FIRST = 0
|
REV_CARDS_OLD_FIRST = 0
|
||||||
REV_CARDS_NEW_FIRST = 1
|
REV_CARDS_NEW_FIRST = 1
|
||||||
REV_CARDS_RANDOM = 2
|
REV_CARDS_RANDOM = 2
|
||||||
|
|
||||||
|
# Labels
|
||||||
|
##########################################################################
|
||||||
|
|
||||||
|
def newCardOrderLabels():
|
||||||
|
return {
|
||||||
|
0: _("Add new cards in random order"),
|
||||||
|
1: _("Add new cards to end of queue"),
|
||||||
|
}
|
||||||
|
|
||||||
|
def newCardSchedulingLabels():
|
||||||
|
return {
|
||||||
|
0: _("Spread new cards out through reviews"),
|
||||||
|
1: _("Show new cards after all other cards"),
|
||||||
|
2: _("Show new cards before reviews"),
|
||||||
|
}
|
||||||
|
|
||||||
|
# FIXME: order due is not very useful anymore
|
||||||
|
def revCardOrderLabels():
|
||||||
|
return {
|
||||||
|
0: _("Review cards from largest interval"),
|
||||||
|
1: _("Review cards from smallest interval"),
|
||||||
|
2: _("Review cards in order due"),
|
||||||
|
3: _("Review cards in random order"),
|
||||||
|
}
|
||||||
|
|
||||||
|
def failedCardOptionLabels():
|
||||||
|
return {
|
||||||
|
0: _("Show failed cards soon"),
|
||||||
|
1: _("Show failed cards at end"),
|
||||||
|
2: _("Show failed cards in 10 minutes"),
|
||||||
|
3: _("Show failed cards in 8 hours"),
|
||||||
|
4: _("Show failed cards in 3 days"),
|
||||||
|
5: _("Custom failed cards handling"),
|
||||||
|
}
|
||||||
|
|
||||||
|
def alignmentLabels():
|
||||||
|
return {
|
||||||
|
0: _("Center"),
|
||||||
|
1: _("Left"),
|
||||||
|
2: _("Right"),
|
||||||
|
}
|
||||||
|
|
|
||||||
175
anki/db.py
175
anki/db.py
|
|
@ -2,18 +2,6 @@
|
||||||
# Copyright: Damien Elmes <anki@ichi2.net>
|
# Copyright: Damien Elmes <anki@ichi2.net>
|
||||||
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
||||||
|
|
||||||
"""\
|
|
||||||
DB tools
|
|
||||||
====================
|
|
||||||
|
|
||||||
SessionHelper is a wrapper for the standard sqlalchemy session, which provides
|
|
||||||
some convenience routines, and manages transactions itself.
|
|
||||||
|
|
||||||
object_session() is a replacement for the standard object_session(), which
|
|
||||||
provides the features of SessionHelper, and avoids taking out another
|
|
||||||
transaction.
|
|
||||||
"""
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from pysqlite2 import dbapi2 as sqlite
|
from pysqlite2 import dbapi2 as sqlite
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
|
@ -22,128 +10,63 @@ except ImportError:
|
||||||
except:
|
except:
|
||||||
raise Exception("Please install pysqlite2 or python2.5")
|
raise Exception("Please install pysqlite2 or python2.5")
|
||||||
|
|
||||||
from sqlalchemy import (Table, Integer, Float, Column, MetaData,
|
|
||||||
ForeignKey, Boolean, String, Date,
|
|
||||||
UniqueConstraint, Index, PrimaryKeyConstraint)
|
|
||||||
from sqlalchemy import create_engine
|
|
||||||
from sqlalchemy.orm import mapper, sessionmaker as _sessionmaker, relation, backref, \
|
|
||||||
object_session as _object_session, class_mapper
|
|
||||||
from sqlalchemy.sql import select, text, and_
|
|
||||||
from sqlalchemy.exceptions import DBAPIError, OperationalError
|
|
||||||
from sqlalchemy.pool import NullPool
|
|
||||||
import sqlalchemy
|
|
||||||
|
|
||||||
# some users are still on 0.4.x..
|
|
||||||
import warnings
|
|
||||||
warnings.filterwarnings('ignore', 'Use session.add()')
|
|
||||||
warnings.filterwarnings('ignore', 'Use session.expunge_all()')
|
|
||||||
|
|
||||||
# sqlalchemy didn't handle the move to unicodetext nicely
|
|
||||||
try:
|
|
||||||
from sqlalchemy import UnicodeText
|
|
||||||
except ImportError:
|
|
||||||
from sqlalchemy import Unicode
|
|
||||||
UnicodeText = Unicode
|
|
||||||
|
|
||||||
from anki.hooks import runHook
|
from anki.hooks import runHook
|
||||||
|
#FIXME: do we need the dbFinished hook?
|
||||||
|
|
||||||
# shared metadata
|
class DB(object):
|
||||||
metadata = MetaData()
|
def __init__(self, path, level="EXCLUSIVE"):
|
||||||
|
self._db = sqlite.connect(
|
||||||
|
path, timeout=0, isolation_level=level)
|
||||||
|
self._path = path
|
||||||
|
self.echo = False
|
||||||
|
|
||||||
# this class assumes the provided session is called with transactional=False
|
def execute(self, sql, *a, **ka):
|
||||||
class SessionHelper(object):
|
if self.echo:
|
||||||
"Add some convenience routines to a session."
|
print sql, a, ka
|
||||||
|
if ka:
|
||||||
def __init__(self, session, lock=True, transaction=True):
|
# execute("...where id = :id", id=5)
|
||||||
self._session = session
|
res = self._db.execute(sql, ka)
|
||||||
self._lock = lock
|
|
||||||
self._transaction = transaction
|
|
||||||
if self._transaction:
|
|
||||||
self._session.begin()
|
|
||||||
if self._lock:
|
|
||||||
self._lockDB()
|
|
||||||
self._seen = True
|
|
||||||
|
|
||||||
def save(self, obj):
|
|
||||||
# compat
|
|
||||||
if sqlalchemy.__version__.startswith("0.4."):
|
|
||||||
self._session.save(obj)
|
|
||||||
else:
|
else:
|
||||||
self._session.add(obj)
|
# execute("...where id = ?", 5)
|
||||||
|
res = self._db.execute(sql, a)
|
||||||
def expunge_all(self):
|
|
||||||
# compat
|
|
||||||
if sqlalchemy.__version__.startswith("0.4."):
|
|
||||||
self._session.clear()
|
|
||||||
else:
|
|
||||||
self._session.expunge_all()
|
|
||||||
|
|
||||||
def update(self, obj):
|
|
||||||
# compat
|
|
||||||
if sqlalchemy.__version__.startswith("0.4."):
|
|
||||||
self._session.update(obj)
|
|
||||||
else:
|
|
||||||
self._session.add(obj)
|
|
||||||
|
|
||||||
def execute(self, *a, **ka):
|
|
||||||
x = self._session.execute(*a, **ka)
|
|
||||||
runHook("dbFinished")
|
runHook("dbFinished")
|
||||||
return x
|
return res
|
||||||
|
|
||||||
def __getattr__(self, k):
|
def executemany(self, sql, l):
|
||||||
return getattr(self.__dict__['_session'], k)
|
if self.echo:
|
||||||
|
print sql, l
|
||||||
def scalar(self, sql, **args):
|
self._db.executemany(sql, l)
|
||||||
return self.execute(text(sql), args).scalar()
|
runHook("dbFinished")
|
||||||
|
|
||||||
def all(self, sql, **args):
|
|
||||||
return self.execute(text(sql), args).fetchall()
|
|
||||||
|
|
||||||
def first(self, sql, **args):
|
|
||||||
c = self.execute(text(sql), args)
|
|
||||||
r = c.fetchone()
|
|
||||||
c.close()
|
|
||||||
return r
|
|
||||||
|
|
||||||
def column0(self, sql, **args):
|
|
||||||
return [x[0] for x in self.execute(text(sql), args).fetchall()]
|
|
||||||
|
|
||||||
def statement(self, sql, **kwargs):
|
|
||||||
"Execute a statement without returning any results. Flush first."
|
|
||||||
return self.execute(text(sql), kwargs)
|
|
||||||
|
|
||||||
def statements(self, sql, data):
|
|
||||||
"Execute a statement across data. Flush first."
|
|
||||||
return self.execute(text(sql), data)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return repr(self._session)
|
|
||||||
|
|
||||||
def commit(self):
|
def commit(self):
|
||||||
self._session.commit()
|
self._db.commit()
|
||||||
if self._transaction:
|
|
||||||
self._session.begin()
|
|
||||||
if self._lock:
|
|
||||||
self._lockDB()
|
|
||||||
|
|
||||||
def _lockDB(self):
|
def scalar(self, *a, **kw):
|
||||||
"Take out a write lock."
|
res = self.execute(*a, **kw).fetchone()
|
||||||
self._session.execute("pragma locking_mode = exclusive")
|
if res:
|
||||||
self._session.execute(text("update deck set modified=modified"))
|
return res[0]
|
||||||
|
return None
|
||||||
|
|
||||||
def object_session(*args):
|
def all(self, *a, **kw):
|
||||||
s = _object_session(*args)
|
return self.execute(*a, **kw).fetchall()
|
||||||
if s:
|
|
||||||
return SessionHelper(s, lock=False, transaction=False)
|
|
||||||
return None
|
|
||||||
|
|
||||||
def sessionmaker(*args, **kwargs):
|
def first(self, *a, **kw):
|
||||||
if sqlalchemy.__version__ < "0.5":
|
c = self.execute(*a, **kw)
|
||||||
if 'autocommit' in kwargs:
|
res = c.fetchone()
|
||||||
kwargs['transactional'] = not kwargs['autocommit']
|
c.close()
|
||||||
del kwargs['autocommit']
|
return res
|
||||||
else:
|
|
||||||
if 'transactional' in kwargs:
|
def list(self, *a, **kw):
|
||||||
kwargs['autocommit'] = not kwargs['transactional']
|
return [x[0] for x in self.execute(*a, **kw)]
|
||||||
del kwargs['transactional']
|
|
||||||
return _sessionmaker(*args, **kwargs)
|
def executescript(self, sql):
|
||||||
|
if self.echo:
|
||||||
|
print sql
|
||||||
|
self._db.executescript(sql)
|
||||||
|
runHook("dbFinished")
|
||||||
|
|
||||||
|
def rollback(self):
|
||||||
|
self._db.rollback()
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
self._db.close()
|
||||||
|
|
|
||||||
1511
anki/deck.py
1511
anki/deck.py
File diff suppressed because it is too large
Load diff
|
|
@ -2,42 +2,12 @@
|
||||||
# Copyright: Damien Elmes <anki@ichi2.net>
|
# Copyright: Damien Elmes <anki@ichi2.net>
|
||||||
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
||||||
|
|
||||||
class Error(Exception):
|
class AnkiError(Exception):
|
||||||
def __init__(self, message="", **data):
|
def __init__(self, type, **data):
|
||||||
|
self.type = type
|
||||||
self.data = data
|
self.data = data
|
||||||
self._message = message
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
m = self._message
|
m = self.type
|
||||||
if self.data:
|
if self.data:
|
||||||
m += ": %s" % repr(self.data)
|
m += ": %s" % repr(self.data)
|
||||||
return m
|
return m
|
||||||
|
|
||||||
class DeckAccessError(Error):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class ImportFileError(Error):
|
|
||||||
"Unable to load file to import from."
|
|
||||||
pass
|
|
||||||
|
|
||||||
class ImportFormatError(Error):
|
|
||||||
"Unable to determine pattern in text file."
|
|
||||||
pass
|
|
||||||
|
|
||||||
class ImportEncodingError(Error):
|
|
||||||
"The file was not in utf-8."
|
|
||||||
pass
|
|
||||||
|
|
||||||
class ExportFileError(Error):
|
|
||||||
"Unable to save file."
|
|
||||||
pass
|
|
||||||
|
|
||||||
class SyncError(Error):
|
|
||||||
"A problem occurred during syncing."
|
|
||||||
pass
|
|
||||||
|
|
||||||
# facts, models
|
|
||||||
class FactInvalidError(Error):
|
|
||||||
"""A fact was invalid/not unique according to the model.
|
|
||||||
'field' defines the problem field.
|
|
||||||
'type' defines the type of error ('fieldEmpty', 'fieldNotUnique')"""
|
|
||||||
pass
|
|
||||||
|
|
|
||||||
|
|
@ -8,9 +8,8 @@ from anki import DeckStorage
|
||||||
from anki.cards import Card
|
from anki.cards import Card
|
||||||
from anki.sync import SyncClient, SyncServer, copyLocalMedia
|
from anki.sync import SyncClient, SyncServer, copyLocalMedia
|
||||||
from anki.lang import _
|
from anki.lang import _
|
||||||
from anki.utils import findTag, parseTags, stripHTML, ids2str
|
from anki.utils import parseTags, stripHTML, ids2str
|
||||||
from anki.tags import tagIds
|
from anki.tags import tagIds
|
||||||
from anki.db import *
|
|
||||||
|
|
||||||
class Exporter(object):
|
class Exporter(object):
|
||||||
def __init__(self, deck):
|
def __init__(self, deck):
|
||||||
|
|
|
||||||
238
anki/facts.py
238
anki/facts.py
|
|
@ -3,166 +3,126 @@
|
||||||
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
|
||||||
|
|
||||||
import time
|
import time
|
||||||
from anki.db import *
|
from anki.errors import AnkiError
|
||||||
from anki.errors import *
|
|
||||||
from anki.models import Model, FieldModel, fieldModelsTable
|
|
||||||
from anki.utils import genID, stripHTMLMedia, fieldChecksum, intTime, \
|
from anki.utils import genID, stripHTMLMedia, fieldChecksum, intTime, \
|
||||||
addTags, deleteTags, parseTags
|
addTags, deleteTags, parseTags
|
||||||
from anki.hooks import runHook
|
|
||||||
|
|
||||||
# Fields in a fact
|
|
||||||
##########################################################################
|
|
||||||
|
|
||||||
fieldsTable = Table(
|
|
||||||
'fields', metadata,
|
|
||||||
Column('id', Integer, primary_key=True),
|
|
||||||
Column('factId', Integer, ForeignKey("facts.id"), nullable=False),
|
|
||||||
Column('fieldModelId', Integer, ForeignKey("fieldModels.id"),
|
|
||||||
nullable=False),
|
|
||||||
Column('ordinal', Integer, nullable=False),
|
|
||||||
Column('value', UnicodeText, nullable=False),
|
|
||||||
Column('chksum', String, nullable=False, default=""))
|
|
||||||
|
|
||||||
class Field(object):
|
|
||||||
"A field in a fact."
|
|
||||||
|
|
||||||
def __init__(self, fieldModel=None):
|
|
||||||
if fieldModel:
|
|
||||||
self.fieldModel = fieldModel
|
|
||||||
self.ordinal = fieldModel.ordinal
|
|
||||||
self.value = u""
|
|
||||||
self.id = genID()
|
|
||||||
|
|
||||||
def getName(self):
|
|
||||||
return self.fieldModel.name
|
|
||||||
name = property(getName)
|
|
||||||
|
|
||||||
mapper(Field, fieldsTable, properties={
|
|
||||||
'fieldModel': relation(FieldModel)
|
|
||||||
})
|
|
||||||
|
|
||||||
# Facts: a set of fields and a model
|
|
||||||
##########################################################################
|
|
||||||
|
|
||||||
# Pos: incrementing number defining add order. There may be duplicates if
|
|
||||||
# content is added on two sync locations at once. Importing adds to end.
|
|
||||||
# Cache: a HTML-stripped amalgam of the field contents, so we can perform
|
|
||||||
# searches of marked up text in a reasonable time.
|
|
||||||
|
|
||||||
factsTable = Table(
|
|
||||||
'facts', metadata,
|
|
||||||
Column('id', Integer, primary_key=True),
|
|
||||||
Column('modelId', Integer, ForeignKey("models.id"), nullable=False),
|
|
||||||
Column('pos', Integer, nullable=False),
|
|
||||||
Column('modified', Integer, nullable=False, default=intTime),
|
|
||||||
Column('tags', UnicodeText, nullable=False, default=u""),
|
|
||||||
Column('cache', UnicodeText, nullable=False, default=u""))
|
|
||||||
|
|
||||||
class Fact(object):
|
class Fact(object):
|
||||||
"A single fact. Fields exposed as dict interface."
|
|
||||||
|
|
||||||
def __init__(self, model=None, pos=None):
|
def __init__(self, deck, model=None, id=None):
|
||||||
self.model = model
|
assert not (model and id)
|
||||||
self.id = genID()
|
self.deck = deck
|
||||||
self._tags = u""
|
if id:
|
||||||
if model:
|
self.id = id
|
||||||
# creating
|
self.load()
|
||||||
for fm in model.fieldModels:
|
else:
|
||||||
self.fields.append(Field(fm))
|
self.id = genID()
|
||||||
self.pos = pos
|
self.model = model
|
||||||
self.new = True
|
self.mid = model.id
|
||||||
|
self.mod = intTime()
|
||||||
|
self.tags = ""
|
||||||
|
self.cache = ""
|
||||||
|
self._fields = [""] * len(self.model.fields)
|
||||||
|
self._fmap = self.model.fieldMap()
|
||||||
|
|
||||||
def isNew(self):
|
def load(self):
|
||||||
return getattr(self, 'new', False)
|
(self.mid,
|
||||||
|
self.mod,
|
||||||
|
self.pos,
|
||||||
|
self.tags) = self.deck.db.first("""
|
||||||
|
select mid, mod, pos, tags from facts where id = ?""", self.id)
|
||||||
|
self._fields = self.deck.db.list("""
|
||||||
|
select value from fdata where fid = ? order by ordinal""", self.id)
|
||||||
|
self.model = self.deck.getModel(self.mid)
|
||||||
|
|
||||||
|
def flush(self):
|
||||||
|
self.mod = intTime()
|
||||||
|
# facts table
|
||||||
|
self.cache = stripHTMLMedia(u" ".join(self._fields))
|
||||||
|
self.deck.db.execute("""
|
||||||
|
insert or replace into facts values (?, ?, ?, ?, ?, ?)""",
|
||||||
|
self.id, self.mid, self.mod,
|
||||||
|
self.pos, self.tags, self.cache)
|
||||||
|
# fdata table
|
||||||
|
self.deck.db.execute("delete from fdata where fid = ?", self.id)
|
||||||
|
d = []
|
||||||
|
for (fmid, ord, conf) in self._fmap.values():
|
||||||
|
val = self._fields[ord]
|
||||||
|
d.append(dict(fid=self.id, fmid=fmid, ord=ord,
|
||||||
|
val=val))
|
||||||
|
self.deck.db.executemany("""
|
||||||
|
insert into fdata values (:fid, :fmid, :ord, :val, '')""", d)
|
||||||
|
# media and caches
|
||||||
|
self.deck.updateCache([self.id], "fact")
|
||||||
|
|
||||||
|
def cards(self):
|
||||||
|
return [self.deck.getCard(id) for id in self.deck.db.list(
|
||||||
|
"select id from cards where fid = ? order by ord", self.id)]
|
||||||
|
|
||||||
|
# Dict interface
|
||||||
|
##################################################
|
||||||
|
|
||||||
def keys(self):
|
def keys(self):
|
||||||
return [field.name for field in self.fields]
|
return self._fmap.keys()
|
||||||
|
|
||||||
def values(self):
|
def values(self):
|
||||||
return [field.value for field in self.fields]
|
return self._fields
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def items(self):
|
||||||
|
return [(k, self._fields[v])
|
||||||
|
for (k, v) in self._fmap.items()]
|
||||||
|
|
||||||
|
def _fieldOrd(self, key):
|
||||||
try:
|
try:
|
||||||
return [f.value for f in self.fields if f.name == key][0]
|
return self._fmap[key][1]
|
||||||
except IndexError:
|
except:
|
||||||
raise KeyError(key)
|
raise KeyError(key)
|
||||||
|
|
||||||
def __setitem__(self, key, value):
|
def __getitem__(self, key):
|
||||||
try:
|
return self._fields[self._fieldOrd(key)]
|
||||||
item = [f for f in self.fields if f.name == key][0]
|
|
||||||
except IndexError:
|
|
||||||
raise KeyError
|
|
||||||
item.value = value
|
|
||||||
if item.fieldModel.unique:
|
|
||||||
item.chksum = fieldChecksum(value)
|
|
||||||
else:
|
|
||||||
item.chksum = ""
|
|
||||||
|
|
||||||
def get(self, key, default):
|
def __setitem__(self, key, value):
|
||||||
try:
|
self._fields[self._fieldOrd(key)] = value
|
||||||
return self[key]
|
|
||||||
except (IndexError, KeyError):
|
def fieldsWithIds(self):
|
||||||
return default
|
return dict(
|
||||||
|
[(k, (v[0], self[k])) for (k,v) in self._fmap.items()])
|
||||||
|
|
||||||
|
# Tags
|
||||||
|
##################################################
|
||||||
|
|
||||||
def addTags(self, tags):
|
def addTags(self, tags):
|
||||||
self._tags = addTags(tags, self._tags)
|
self.tags = addTags(tags, self.tags)
|
||||||
|
|
||||||
def deleteTags(self, tags):
|
def deleteTags(self, tags):
|
||||||
self._tags = deleteTags(tags, self._tags)
|
self.tags = deleteTags(tags, self.tags)
|
||||||
|
|
||||||
def tags(self):
|
# Unique/duplicate checks
|
||||||
return parseTags(self._tags)
|
##################################################
|
||||||
|
|
||||||
def assertValid(self):
|
def fieldUnique(self, name):
|
||||||
"Raise an error if required fields are empty."
|
(fmid, ord, conf) = self._fmap[name]
|
||||||
for field in self.fields:
|
if not conf['unique']:
|
||||||
if not self.fieldValid(field):
|
|
||||||
raise FactInvalidError(type="fieldEmpty",
|
|
||||||
field=field.name)
|
|
||||||
|
|
||||||
def fieldValid(self, field):
|
|
||||||
return not (field.fieldModel.required and not field.value.strip())
|
|
||||||
|
|
||||||
def assertUnique(self, s):
|
|
||||||
"Raise an error if duplicate fields are found."
|
|
||||||
for field in self.fields:
|
|
||||||
if not self.fieldUnique(field, s):
|
|
||||||
raise FactInvalidError(type="fieldNotUnique",
|
|
||||||
field=field.name)
|
|
||||||
|
|
||||||
def fieldUnique(self, field, s):
|
|
||||||
if not field.fieldModel.unique:
|
|
||||||
return True
|
return True
|
||||||
req = ("select value from fields "
|
val = self[name]
|
||||||
"where fieldModelId = :fmid and value = :val and chksum = :chk")
|
csum = fieldChecksum(val)
|
||||||
if field.id:
|
return not self.deck.db.scalar(
|
||||||
req += " and id != %s" % field.id
|
"select 1 from fdata where csum = ? and fid != ? and val = ?",
|
||||||
return not s.scalar(req, val=field.value, fmid=field.fieldModel.id,
|
csum, self.id, val)
|
||||||
chk=fieldChecksum(field.value))
|
|
||||||
|
|
||||||
def focusLost(self, field):
|
def fieldComplete(self, name, text=None):
|
||||||
runHook('fact.focusLost', self, field)
|
(fmid, ord, conf) = self._fmap[name]
|
||||||
|
if not conf['required']:
|
||||||
|
return True
|
||||||
|
return self[name]
|
||||||
|
|
||||||
def setModified(self, textChanged=False, deck=None, media=True):
|
def problems(self):
|
||||||
"Mark modified and update cards."
|
d = []
|
||||||
self.modified = intTime()
|
for k in self._fmap.keys():
|
||||||
if textChanged:
|
if not self.fieldUnique(k):
|
||||||
if not deck:
|
d.append("unique")
|
||||||
# FIXME: compat code
|
elif not self.fieldComplete(k):
|
||||||
import ankiqt
|
d.append("required")
|
||||||
if not getattr(ankiqt, 'setModWarningShown', None):
|
else:
|
||||||
import sys; sys.stderr.write(
|
d.append(None)
|
||||||
"plugin needs to pass deck to fact.setModified()")
|
return d
|
||||||
ankiqt.setModWarningShown = True
|
|
||||||
deck = ankiqt.mw.deck
|
|
||||||
assert deck
|
|
||||||
self.cache = stripHTMLMedia(u" ".join(
|
|
||||||
self.values()))
|
|
||||||
for card in self.cards:
|
|
||||||
card.rebuildQA(deck)
|
|
||||||
|
|
||||||
mapper(Fact, factsTable, properties={
|
|
||||||
'model': relation(Model),
|
|
||||||
'fields': relation(Field, backref="fact", order_by=Field.ordinal),
|
|
||||||
'_tags': factsTable.c.tags
|
|
||||||
})
|
|
||||||
|
|
|
||||||
86
anki/find.py
86
anki/find.py
|
|
@ -24,7 +24,7 @@ SEARCH_PHRASE_WB = 9
|
||||||
|
|
||||||
def findCards(deck, query):
|
def findCards(deck, query):
|
||||||
(q, cmquery, showdistinct, filters, args) = findCardsWhere(deck, query)
|
(q, cmquery, showdistinct, filters, args) = findCardsWhere(deck, query)
|
||||||
(factIdList, cardIdList) = findCardsMatchingFilters(deck, filters)
|
(fidList, cardIdList) = findCardsMatchingFilters(deck, filters)
|
||||||
query = "select id from cards"
|
query = "select id from cards"
|
||||||
hasWhere = False
|
hasWhere = False
|
||||||
if q:
|
if q:
|
||||||
|
|
@ -36,18 +36,18 @@ def findCards(deck, query):
|
||||||
hasWhere = True
|
hasWhere = True
|
||||||
else: query += " and "
|
else: query += " and "
|
||||||
if cmquery['pos']:
|
if cmquery['pos']:
|
||||||
query += (" factId in(select distinct factId from cards "+
|
query += (" fid in(select distinct fid from cards "+
|
||||||
"where id in (" + cmquery['pos'] + ")) ")
|
"where id in (" + cmquery['pos'] + ")) ")
|
||||||
query += " and id in(" + cmquery['pos'] + ") "
|
query += " and id in(" + cmquery['pos'] + ") "
|
||||||
if cmquery['neg']:
|
if cmquery['neg']:
|
||||||
query += (" factId not in(select distinct factId from "+
|
query += (" fid not in(select distinct fid from "+
|
||||||
"cards where id in (" + cmquery['neg'] + ")) ")
|
"cards where id in (" + cmquery['neg'] + ")) ")
|
||||||
if factIdList is not None:
|
if fidList is not None:
|
||||||
if hasWhere is False:
|
if hasWhere is False:
|
||||||
query += " where "
|
query += " where "
|
||||||
hasWhere = True
|
hasWhere = True
|
||||||
else: query += " and "
|
else: query += " and "
|
||||||
query += " factId IN %s" % ids2str(factIdList)
|
query += " fid IN %s" % ids2str(fidList)
|
||||||
if cardIdList is not None:
|
if cardIdList is not None:
|
||||||
if hasWhere is False:
|
if hasWhere is False:
|
||||||
query += " where "
|
query += " where "
|
||||||
|
|
@ -55,9 +55,9 @@ def findCards(deck, query):
|
||||||
else: query += " and "
|
else: query += " and "
|
||||||
query += " id IN %s" % ids2str(cardIdList)
|
query += " id IN %s" % ids2str(cardIdList)
|
||||||
if showdistinct:
|
if showdistinct:
|
||||||
query += " group by factId"
|
query += " group by fid"
|
||||||
#print query, args
|
#print query, args
|
||||||
return deck.db.column0(query, **args)
|
return deck.db.list(query, **args)
|
||||||
|
|
||||||
def findCardsWhere(deck, query):
|
def findCardsWhere(deck, query):
|
||||||
(tquery, fquery, qquery, fidquery, cmquery, sfquery, qaquery,
|
(tquery, fquery, qquery, fidquery, cmquery, sfquery, qaquery,
|
||||||
|
|
@ -65,15 +65,15 @@ def findCardsWhere(deck, query):
|
||||||
q = ""
|
q = ""
|
||||||
x = []
|
x = []
|
||||||
if tquery:
|
if tquery:
|
||||||
x.append(" factId in (%s)" % tquery)
|
x.append(" fid in (%s)" % tquery)
|
||||||
if fquery:
|
if fquery:
|
||||||
x.append(" factId in (%s)" % fquery)
|
x.append(" fid in (%s)" % fquery)
|
||||||
if qquery:
|
if qquery:
|
||||||
x.append(" id in (%s)" % qquery)
|
x.append(" id in (%s)" % qquery)
|
||||||
if fidquery:
|
if fidquery:
|
||||||
x.append(" id in (%s)" % fidquery)
|
x.append(" id in (%s)" % fidquery)
|
||||||
if sfquery:
|
if sfquery:
|
||||||
x.append(" factId in (%s)" % sfquery)
|
x.append(" fid in (%s)" % sfquery)
|
||||||
if qaquery:
|
if qaquery:
|
||||||
x.append(" id in (%s)" % qaquery)
|
x.append(" id in (%s)" % qaquery)
|
||||||
if x:
|
if x:
|
||||||
|
|
@ -83,7 +83,7 @@ def findCardsWhere(deck, query):
|
||||||
def allFMFields(deck, tolower=False):
|
def allFMFields(deck, tolower=False):
|
||||||
fields = []
|
fields = []
|
||||||
try:
|
try:
|
||||||
fields = deck.db.column0(
|
fields = deck.db.list(
|
||||||
"select distinct name from fieldmodels order by name")
|
"select distinct name from fieldmodels order by name")
|
||||||
except:
|
except:
|
||||||
fields = []
|
fields = []
|
||||||
|
|
@ -269,17 +269,17 @@ def findCardsMatchingFilters(deck, filters):
|
||||||
if fquery:
|
if fquery:
|
||||||
if filter['is_neg']: fquery += " except "
|
if filter['is_neg']: fquery += " except "
|
||||||
else: fquery += " intersect "
|
else: fquery += " intersect "
|
||||||
elif filter['is_neg']: fquery += "select id from fields except "
|
elif filter['is_neg']: fquery += "select id from fdata except "
|
||||||
|
|
||||||
value = filter['value'].replace("*", "%")
|
value = filter['value'].replace("*", "%")
|
||||||
args["_ff_%d" % c] = "%"+value+"%"
|
args["_ff_%d" % c] = "%"+value+"%"
|
||||||
|
|
||||||
fquery += (
|
fquery += (
|
||||||
"select id from fields where value like "+
|
"select id from fdata where value like "+
|
||||||
":_ff_%d escape '\\'" % c)
|
":_ff_%d escape '\\'" % c)
|
||||||
|
|
||||||
rows = deck.db.execute(
|
rows = deck.db.execute(
|
||||||
'select factId, value from fields where id in (' +
|
'select fid, value from fdata where id in (' +
|
||||||
fquery + ')', args)
|
fquery + ')', args)
|
||||||
while (1):
|
while (1):
|
||||||
row = rows.fetchone()
|
row = rows.fetchone()
|
||||||
|
|
@ -300,21 +300,21 @@ def findCardsMatchingFilters(deck, filters):
|
||||||
if sfquery:
|
if sfquery:
|
||||||
if filter['is_neg']: sfquery += " except "
|
if filter['is_neg']: sfquery += " except "
|
||||||
else: sfquery += " intersect "
|
else: sfquery += " intersect "
|
||||||
elif filter['is_neg']: sfquery += "select id from fields except "
|
elif filter['is_neg']: sfquery += "select id from fdata except "
|
||||||
field = field.replace("*", "%")
|
field = field.replace("*", "%")
|
||||||
value = filter['value'].replace("*", "%")
|
value = filter['value'].replace("*", "%")
|
||||||
args["_ff_%d" % c] = "%"+value+"%"
|
args["_ff_%d" % c] = "%"+value+"%"
|
||||||
|
|
||||||
ids = deck.db.column0(
|
ids = deck.db.list(
|
||||||
"select id from fieldmodels where name like "+
|
"select id from fieldmodels where name like "+
|
||||||
":field escape '\\'", field=field)
|
":field escape '\\'", field=field)
|
||||||
sfquery += ("select id from fields where "+
|
sfquery += ("select id from fdata where "+
|
||||||
"fieldModelId in %s and value like "+
|
"fmid in %s and value like "+
|
||||||
":_ff_%d escape '\\'") % (ids2str(ids), c)
|
":_ff_%d escape '\\'") % (ids2str(ids), c)
|
||||||
|
|
||||||
rows = deck.db.execute(
|
rows = deck.db.execute(
|
||||||
'select f.factId, f.value, fm.name from fields as f '+
|
'select f.fid, f.value, fm.name from fdata as f '+
|
||||||
'left join fieldmodels as fm ON (f.fieldModelId = '+
|
'left join fieldmodels as fm ON (f.fmid = '+
|
||||||
'fm.id) where f.id in (' + sfquery + ')', args)
|
'fm.id) where f.id in (' + sfquery + ')', args)
|
||||||
while (1):
|
while (1):
|
||||||
row = rows.fetchone()
|
row = rows.fetchone()
|
||||||
|
|
@ -364,18 +364,18 @@ def findCardsMatchingFilters(deck, filters):
|
||||||
(filter['is_neg'] is True and res is None)):
|
(filter['is_neg'] is True and res is None)):
|
||||||
cardFilterMatches.append(row[0])
|
cardFilterMatches.append(row[0])
|
||||||
|
|
||||||
factIds = None
|
fids = None
|
||||||
if len(factFilters) > 0 or len(fieldFilters) > 0:
|
if len(factFilters) > 0 or len(fieldFilters) > 0:
|
||||||
factIds = []
|
fids = []
|
||||||
factIds.extend(factFilterMatches)
|
fids.extend(factFilterMatches)
|
||||||
factIds.extend(fieldFilterMatches)
|
fids.extend(fieldFilterMatches)
|
||||||
|
|
||||||
cardIds = None
|
cardIds = None
|
||||||
if len(cardFilters) > 0:
|
if len(cardFilters) > 0:
|
||||||
cardIds = []
|
cardIds = []
|
||||||
cardIds.extend(cardFilterMatches)
|
cardIds.extend(cardFilterMatches)
|
||||||
|
|
||||||
return (factIds, cardIds)
|
return (fids, cardIds)
|
||||||
|
|
||||||
def _findCards(deck, query):
|
def _findCards(deck, query):
|
||||||
"Find facts matching QUERY."
|
"Find facts matching QUERY."
|
||||||
|
|
@ -400,7 +400,7 @@ def _findCards(deck, query):
|
||||||
tquery += "select id from facts except "
|
tquery += "select id from facts except "
|
||||||
if token == "none":
|
if token == "none":
|
||||||
tquery += """
|
tquery += """
|
||||||
select cards.id from cards, facts where facts.tags = '' and cards.factId = facts.id """
|
select cards.id from cards, facts where facts.tags = '' and cards.fid = facts.id """
|
||||||
else:
|
else:
|
||||||
token = token.replace("*", "%")
|
token = token.replace("*", "%")
|
||||||
if not token.startswith("%"):
|
if not token.startswith("%"):
|
||||||
|
|
@ -450,11 +450,11 @@ select id from facts where tags like :_tag_%d""" % c
|
||||||
fidquery += " intersect "
|
fidquery += " intersect "
|
||||||
elif isNeg:
|
elif isNeg:
|
||||||
fidquery += "select id from cards except "
|
fidquery += "select id from cards except "
|
||||||
fidquery += "select id from cards where factId in (%s)" % token
|
fidquery += "select id from cards where fid in (%s)" % token
|
||||||
elif type == SEARCH_CARD:
|
elif type == SEARCH_CARD:
|
||||||
print "search_card broken"
|
print "search_card broken"
|
||||||
token = token.replace("*", "%")
|
token = token.replace("*", "%")
|
||||||
ids = deck.db.column0("""
|
ids = deck.db.list("""
|
||||||
select id from tags where name like :tag escape '\\'""", tag=token)
|
select id from tags where name like :tag escape '\\'""", tag=token)
|
||||||
if isNeg:
|
if isNeg:
|
||||||
if cmquery['neg']:
|
if cmquery['neg']:
|
||||||
|
|
@ -493,10 +493,10 @@ select cardId from cardTags where src = 2 and cardTags.tagId in %s""" % ids2str(
|
||||||
field = field.replace("*", "%")
|
field = field.replace("*", "%")
|
||||||
value = value.replace("*", "%")
|
value = value.replace("*", "%")
|
||||||
args["_ff_%d" % c] = "%"+value+"%"
|
args["_ff_%d" % c] = "%"+value+"%"
|
||||||
ids = deck.db.column0("""
|
ids = deck.db.list("""
|
||||||
select id from fieldmodels where name like :field escape '\\'""", field=field)
|
select id from fieldmodels where name like :field escape '\\'""", field=field)
|
||||||
sfquery += """
|
sfquery += """
|
||||||
select factId from fields where fieldModelId in %s and
|
select fid from fdata where fmid in %s and
|
||||||
value like :_ff_%d escape '\\'""" % (ids2str(ids), c)
|
value like :_ff_%d escape '\\'""" % (ids2str(ids), c)
|
||||||
elif type == SEARCH_QA:
|
elif type == SEARCH_QA:
|
||||||
field = value = ''
|
field = value = ''
|
||||||
|
|
@ -555,17 +555,17 @@ select id from facts where cache like :_ff_%d escape '\\'""" % c
|
||||||
# Find and replace
|
# Find and replace
|
||||||
##########################################################################
|
##########################################################################
|
||||||
|
|
||||||
def findReplace(deck, factIds, src, dst, isRe=False, field=None):
|
def findReplace(deck, fids, src, dst, isRe=False, field=None):
|
||||||
"Find and replace fields in a fact."
|
"Find and replace fields in a fact."
|
||||||
# find
|
# find
|
||||||
s = "select id, factId, value from fields where factId in %s"
|
s = "select id, fid, value from fdata where fid in %s"
|
||||||
if isRe:
|
if isRe:
|
||||||
isRe = re.compile(src)
|
isRe = re.compile(src)
|
||||||
else:
|
else:
|
||||||
s += " and value like :v"
|
s += " and value like :v"
|
||||||
if field:
|
if field:
|
||||||
s += " and fieldModelId = :fmid"
|
s += " and fmid = :fmid"
|
||||||
rows = deck.db.all(s % ids2str(factIds),
|
rows = deck.db.all(s % ids2str(fids),
|
||||||
v="%"+src.replace("%", "%%")+"%",
|
v="%"+src.replace("%", "%%")+"%",
|
||||||
fmid=field)
|
fmid=field)
|
||||||
modded = []
|
modded = []
|
||||||
|
|
@ -581,8 +581,8 @@ def findReplace(deck, factIds, src, dst, isRe=False, field=None):
|
||||||
if val.find(src) != -1]
|
if val.find(src) != -1]
|
||||||
# update
|
# update
|
||||||
if modded:
|
if modded:
|
||||||
deck.db.statements(
|
deck.db.executemany(
|
||||||
'update fields set value = :val where id = :id', modded)
|
'update fdata set value = :val where id = :id', modded)
|
||||||
deck.updateCardQACacheFromIds([f['fid'] for f in modded],
|
deck.updateCardQACacheFromIds([f['fid'] for f in modded],
|
||||||
type="facts")
|
type="facts")
|
||||||
if field:
|
if field:
|
||||||
|
|
@ -596,7 +596,7 @@ def findReplace(deck, factIds, src, dst, isRe=False, field=None):
|
||||||
|
|
||||||
def findDuplicates(deck, fmids):
|
def findDuplicates(deck, fmids):
|
||||||
data = deck.db.all(
|
data = deck.db.all(
|
||||||
"select factId, value from fields where fieldModelId in %s" %
|
"select fid, value from fdata where fmid in %s" %
|
||||||
ids2str(fmids))
|
ids2str(fmids))
|
||||||
vals = {}
|
vals = {}
|
||||||
for (fid, val) in data:
|
for (fid, val) in data:
|
||||||
|
|
@ -657,7 +657,7 @@ def findSorted(deck, query, sortKey):
|
||||||
if sortKey == "fact":
|
if sortKey == "fact":
|
||||||
query = """
|
query = """
|
||||||
select cards.id from cards, facts
|
select cards.id from cards, facts
|
||||||
where cards.factId = facts.id """
|
where cards.fid = facts.id """
|
||||||
if ads:
|
if ads:
|
||||||
query += "and " + ads + " "
|
query += "and " + ads + " "
|
||||||
else:
|
else:
|
||||||
|
|
@ -668,20 +668,20 @@ where cards.factId = facts.id """
|
||||||
else:
|
else:
|
||||||
# field value
|
# field value
|
||||||
ret = self.deck.db.all(
|
ret = self.deck.db.all(
|
||||||
"select id, numeric from fieldModels where name = :name",
|
"select id, numeric from fields where name = :name",
|
||||||
name=sortKey[1])
|
name=sortKey[1])
|
||||||
fields = ",".join([str(x[0]) for x in ret])
|
fields = ",".join([str(x[0]) for x in ret])
|
||||||
# if multiple models have the same field, use the first numeric bool
|
# if multiple models have the same field, use the first numeric bool
|
||||||
numeric = ret[0][1]
|
numeric = ret[0][1]
|
||||||
if numeric:
|
if numeric:
|
||||||
order = "cast(fields.value as real)"
|
order = "cast(fdata.value as real)"
|
||||||
else:
|
else:
|
||||||
order = "fields.value collate nocase"
|
order = "fdata.value collate nocase"
|
||||||
if ads:
|
if ads:
|
||||||
ads = " and " + ads
|
ads = " and " + ads
|
||||||
query = ("select cards.id "
|
query = ("select cards.id "
|
||||||
"from fields, cards where fields.fieldModelId in (%s) "
|
"from fdata, cards where fdata.fmid in (%s) "
|
||||||
"and fields.factId = cards.factId" + ads +
|
"and fdata.fid = cards.fid" + ads +
|
||||||
" order by cards.ordinal, %s") % (fields, order)
|
" order by cards.ordinal, %s") % (fields, order)
|
||||||
# run the query
|
# run the query
|
||||||
self.cards = self.deck.db.all(query)
|
self.cards = self.deck.db.all(query)
|
||||||
|
|
|
||||||
|
|
@ -239,11 +239,11 @@ group by day order by day
|
||||||
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
|
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
|
||||||
limit = self.endOfDay - (numdays) * 86400
|
limit = self.endOfDay - (numdays) * 86400
|
||||||
if attr == "created":
|
if attr == "created":
|
||||||
res = self.deck.db.column0("select %s from cards where %s >= %f" %
|
res = self.deck.db.list("select %s from cards where %s >= %f" %
|
||||||
(attr, attr, limit))
|
(attr, attr, limit))
|
||||||
else:
|
else:
|
||||||
# firstAnswered
|
# firstAnswered
|
||||||
res = self.deck.db.column0(
|
res = self.deck.db.list(
|
||||||
"select time/1000 from revlog where rep = 1")
|
"select time/1000 from revlog where rep = 1")
|
||||||
for r in res:
|
for r in res:
|
||||||
d = int((r - self.endOfDay) / 86400.0)
|
d = int((r - self.endOfDay) / 86400.0)
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,6 @@
|
||||||
# - port all the code referencing the old tables
|
# - port all the code referencing the old tables
|
||||||
|
|
||||||
import time
|
import time
|
||||||
from anki.db import *
|
|
||||||
from anki.utils import intTime
|
from anki.utils import intTime
|
||||||
|
|
||||||
FACT = 0
|
FACT = 0
|
||||||
|
|
@ -17,19 +16,13 @@ MEDIA = 3
|
||||||
GROUP = 4
|
GROUP = 4
|
||||||
GROUPCONFIG = 5
|
GROUPCONFIG = 5
|
||||||
|
|
||||||
gravestonesTable = Table(
|
|
||||||
'gravestones', metadata,
|
|
||||||
Column('delTime', Integer, nullable=False),
|
|
||||||
Column('objectId', Integer, nullable=False),
|
|
||||||
Column('type', Integer, nullable=False))
|
|
||||||
|
|
||||||
def registerOne(db, type, id):
|
def registerOne(db, type, id):
|
||||||
db.statement("insert into gravestones values (:t, :id, :ty)",
|
db.execute("insert into gravestones values (:t, :id, :ty)",
|
||||||
t=intTime(), id=id, ty=type)
|
t=intTime(), id=id, ty=type)
|
||||||
|
|
||||||
def registerMany(db, type, ids):
|
def registerMany(db, type, ids):
|
||||||
db.statements("insert into gravestones values (:t, :id, :ty)",
|
db.executemany("insert into gravestones values (:t, :id, :ty)",
|
||||||
[{'t':intTime(), 'id':x, 'ty':type} for x in ids])
|
[{'t':intTime(), 'id':x, 'ty':type} for x in ids])
|
||||||
|
|
||||||
def forgetAll(db):
|
def forgetAll(db):
|
||||||
db.statement("delete from gravestones")
|
db.execute("delete from gravestones")
|
||||||
|
|
|
||||||
|
|
@ -4,14 +4,6 @@
|
||||||
|
|
||||||
import simplejson, time
|
import simplejson, time
|
||||||
from anki.utils import intTime
|
from anki.utils import intTime
|
||||||
from anki.db import *
|
|
||||||
|
|
||||||
groupsTable = Table(
|
|
||||||
'groups', metadata,
|
|
||||||
Column('id', Integer, primary_key=True),
|
|
||||||
Column('modified', Integer, nullable=False, default=intTime),
|
|
||||||
Column('name', UnicodeText, nullable=False),
|
|
||||||
Column('confId', Integer, nullable=False))
|
|
||||||
|
|
||||||
# maybe define a random cutoff at say +/-30% which controls exit interval
|
# maybe define a random cutoff at say +/-30% which controls exit interval
|
||||||
# variation - 30% of 1 day is 0.7 or 1.3 so always 1 day; 30% of 4 days is
|
# variation - 30% of 1 day is 0.7 or 1.3 so always 1 day; 30% of 4 days is
|
||||||
|
|
@ -32,14 +24,6 @@ defaultConf = {
|
||||||
'leechFails': 16,
|
'leechFails': 16,
|
||||||
}
|
}
|
||||||
|
|
||||||
groupConfigTable = Table(
|
|
||||||
'groupConfig', metadata,
|
|
||||||
Column('id', Integer, primary_key=True),
|
|
||||||
Column('modified', Integer, nullable=False, default=intTime),
|
|
||||||
Column('name', UnicodeText, nullable=False),
|
|
||||||
Column('config', UnicodeText, nullable=False,
|
|
||||||
default=unicode(simplejson.dumps(defaultConf))))
|
|
||||||
|
|
||||||
class GroupConfig(object):
|
class GroupConfig(object):
|
||||||
def __init__(self, name):
|
def __init__(self, name):
|
||||||
self.name = name
|
self.name = name
|
||||||
|
|
@ -53,7 +37,3 @@ class GroupConfig(object):
|
||||||
def save(self):
|
def save(self):
|
||||||
self._config = simplejson.dumps(self.config)
|
self._config = simplejson.dumps(self.config)
|
||||||
self.modified = intTime()
|
self.modified = intTime()
|
||||||
|
|
||||||
mapper(GroupConfig, groupConfigTable, properties={
|
|
||||||
'_config': groupConfigTable.c.config,
|
|
||||||
})
|
|
||||||
|
|
|
||||||
|
|
@ -12,8 +12,8 @@ particular FieldModel, replace it with None. A special number 0 donates a tags
|
||||||
field. The same field model should not occur more than once."""
|
field. The same field model should not occur more than once."""
|
||||||
|
|
||||||
import time
|
import time
|
||||||
from anki.cards import cardsTable
|
#from anki.cards import cardsTable
|
||||||
from anki.facts import factsTable, fieldsTable
|
#from anki.facts import factsTable, fieldsTable
|
||||||
from anki.lang import _
|
from anki.lang import _
|
||||||
from anki.utils import genID, canonifyTags, fieldChecksum
|
from anki.utils import genID, canonifyTags, fieldChecksum
|
||||||
from anki.utils import canonifyTags, ids2str
|
from anki.utils import canonifyTags, ids2str
|
||||||
|
|
@ -49,7 +49,7 @@ class Importer(object):
|
||||||
self.tagsToAdd = u""
|
self.tagsToAdd = u""
|
||||||
|
|
||||||
def doImport(self):
|
def doImport(self):
|
||||||
"Import. Caller must .reset()"
|
"Import."
|
||||||
if self.updateKey is not None:
|
if self.updateKey is not None:
|
||||||
return self.doUpdate()
|
return self.doUpdate()
|
||||||
random = self.deck.newCardOrder == NEW_CARDS_RANDOM
|
random = self.deck.newCardOrder == NEW_CARDS_RANDOM
|
||||||
|
|
|
||||||
|
|
@@ -2,7 +2,7 @@
 # Copyright: Damien Elmes <anki@ichi2.net>
 # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html

-from anki import DeckStorage
+from anki import Deck
 from anki.importing import Importer
 from anki.sync import SyncClient, SyncServer, copyLocalMedia
 from anki.lang import _

@@ -57,13 +57,13 @@ class Anki10Importer(Importer):
         fids = [f[0] for f in res['added-facts']['facts']]
         self.deck.addTags(fids, self.tagsToAdd)
         # mark import material as newly added
-        self.deck.db.statement(
+        self.deck.db.execute(
             "update cards set modified = :t where id in %s" %
             ids2str([x[0] for x in res['added-cards']]), t=time.time())
-        self.deck.db.statement(
+        self.deck.db.execute(
             "update facts set modified = :t where id in %s" %
             ids2str([x[0] for x in res['added-facts']['facts']]), t=time.time())
-        self.deck.db.statement(
+        self.deck.db.execute(
             "update models set modified = :t where id in %s" %
             ids2str([x['id'] for x in res['added-models']]), t=time.time())
         # update total and refresh
@@ -50,11 +50,9 @@ CHANGES MADE TO LIBANKI:
 """

 from anki.importing import Importer
-from anki import DeckStorage
+from anki import Deck
 from anki.facts import Fact
-from anki.models import FieldModel
-from anki.models import CardModel
-from anki.models import Model
+from anki.models import Field, Template, Model
 from anki.lang import _

 from xml.sax import make_parser
@@ -15,7 +15,7 @@ import re, unicodedata, time
 #import chardet


-from anki.deck import Deck
+from anki import Deck

 class SmartDict(dict):
     """
anki/media.py (469 lines changed)
@@ -3,260 +3,271 @@
 # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html

 import os, shutil, re, urllib2, time, tempfile, unicodedata, urllib
-from anki.db import *
 from anki.utils import checksum, genID, intTime
 from anki.lang import _

-# other code depends on this order, so don't reorder
-regexps = ("(?i)(\[sound:([^]]+)\])",
-           "(?i)(<img[^>]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)")
-
-mediaTable = Table(
-    'media', metadata,
-    Column('id', Integer, primary_key=True, nullable=False),
-    Column('filename', UnicodeText, nullable=False, unique=True),
-    Column('refcnt', Integer, nullable=False),
-    Column('modified', Integer, nullable=False),
-    Column('chksum', UnicodeText, nullable=False, default=u""))
-
-# (removed module-level API; bodies elided here, their logic moves onto the
-# MediaRegistry class below, minus the refcount bookkeeping)
-def copyToMedia(deck, path): ...
-def uniquePath(dir, base): ...
-def updateMediaCount(deck, file, count=1): ...
-def removeUnusedMedia(deck): ...
-def mediaFiles(string, remote=False): ...
-def stripMedia(txt): ...
-def escapeImages(string): ...
-def rebuildMediaDir(deck, delete=False, dirty=True): ...
-def downloadMissing(deck): ...
-def downloadRemote(deck): ...
+class MediaRegistry(object):
+
+    # other code depends on this order, so don't reorder
+    regexps = ("(?i)(\[sound:([^]]+)\])",
+               "(?i)(<img[^>]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)")
+
+    def __init__(self, deck):
+        self.deck = deck
+        self.mediaPrefix = ""
+        self._mediaDir = None
+        self._updateMediaDir()
+
+    def mediaDir(self, create=False):
+        if self._mediaDir:
+            return self._mediaDir
+        elif create:
+            self._updateMediaDir(True)
+            return self._mediaDir
+
+    def _updateMediaDir(self, create=False):
+        if self.mediaPrefix:
+            dir = os.path.join(
+                self.mediaPrefix, os.path.basename(self.deck.path))
+        else:
+            dir = self.deck.path
+        dir = re.sub("(?i)\.(anki)$", ".media", dir)
+        if create == None:
+            # don't create, but return dir
+            return dir
+        if not os.path.exists(dir):
+            if not create:
+                return
+            # will raise error if we can't create
+            os.makedirs(dir)
+        # change to the current dir
+        os.chdir(dir)
+        self._mediaDir = dir
+
+    # Adding and registering media
+    ##########################################################################
+
+    def addFile(self, path):
+        """Copy PATH to MEDIADIR, and return new filename.
+        If a file with the same md5sum exists in the DB, return that.
+        If a file with the same name exists, return a unique name."""
+        # see if have duplicate contents
+        csum = self.mediaChecksum(path)
+        if not csum:
+            # file was unreadable or didn't exist
+            return None
+        file = self.deck.db.scalar(
+            "select file from media where csum = :cs",
+            cs=csum)
+        if not file:
+            base = os.path.basename(path)
+            mdir = self.mediaDir(create=True)
+            file = self.uniquePath(mdir, base)
+            shutil.copy2(path, file)
+            self.registerFile(base)
+        return os.path.basename(file)
+
+    def registerFile(self, file):
+        "Add a single file to the media database."
+        if self.mediaDir():
+            csum = self.mediaChecksum(os.path.join(self.mediaDir(), file))
+        else:
+            csum = ""
+        self.deck.db.execute(
+            "insert or replace into media values (?, ?, ?)",
+            file, intTime(), csum)
+
+    def registerText(self, string):
+        "Add all media in string to the media database."
+        for f in self.mediaFiles(string):
+            self.registerFile(f)
+
+    # Moving media
+    ##########################################################################
+
+    def renameMediaDir(self, oldPath):
+        "Copy oldPath to our current media dir. "
+        assert os.path.exists(oldPath)
+        newPath = self.mediaDir(create=None)
+        # copytree doesn't want the dir to exist
+        try:
+            shutil.copytree(oldPath, newPath)
+        except:
+            # FIXME: should really remove everything in old dir instead of
+            # giving up
+            pass
+
+    # Tools
+    ##########################################################################
+
+    def mediaChecksum(self, path):
+        "Return checksum of PATH, or empty string."
+        try:
+            return checksum(open(path, "rb").read())
+        except:
+            return ""
+
+    def uniquePath(self, dir, base):
+        # remove any dangerous characters
+        base = re.sub(r"[][<>:/\\&]", "", base)
+        # find a unique name
+        (root, ext) = os.path.splitext(base)
+        def repl(match):
+            n = int(match.group(1))
+            return " (%d)" % (n+1)
+        while True:
+            path = os.path.join(dir, root + ext)
+            if not os.path.exists(path):
+                break
+            reg = " \((\d+)\)$"
+            if not re.search(reg, root):
+                root = root + " (1)"
+            else:
+                root = re.sub(reg, repl, root)
+        return path
+
+    # String manipulation
+    ##########################################################################
+
+    def mediaFiles(self, string, includeRemote=False):
+        l = []
+        for reg in self.regexps:
+            for (full, fname) in re.findall(reg, string):
+                isLocal = not re.match("(https?|ftp)://", fname.lower())
+                if isLocal or includeRemote:
+                    l.append(fname)
+        return l
+
+    def stripMedia(self, txt):
+        for reg in self.regexps:
+            txt = re.sub(reg, "", txt)
+        return txt
+
+    def escapeImages(self, string):
+        def repl(match):
+            tag = match.group(1)
+            fname = match.group(2)
+            if re.match("(https?|ftp)://", fname):
+                return tag
+            return tag.replace(
+                fname, urllib.quote(fname.encode("utf-8")))
+        return re.sub(self.regexps[1], repl, string)
+
+    # Rebuilding DB
+    ##########################################################################
+
+    def rebuildMediaDir(self, delete=False):
+        mdir = self.mediaDir()
+        if not mdir:
+            return (0, 0)
+        self.deck.startProgress()
+        # delete all media entries in database
+        self.deck.db.execute("delete from media")
+        # look through cards for media references
+        normrefs = {}
+        def norm(s):
+            if isinstance(s, unicode):
+                return unicodedata.normalize('NFD', s)
+            return s
+        for (question, answer) in self.deck.db.all(
+            "select q, a from cards"):
+            for txt in (question, answer):
+                for f in self.mediaFiles(txt):
+                    normrefs[norm(f)] = True
+                    self.registerFile(f)
+        # find unused media
+        unused = []
+        for file in os.listdir(mdir):
+            path = os.path.join(mdir, file)
+            if not os.path.isfile(path):
+                # ignore directories
+                continue
+            nfile = norm(file)
+            if nfile not in normrefs:
+                unused.append(file)
+        # optionally delete
+        if delete:
+            for f in unused:
+                path = os.path.join(mdir, f)
+                os.unlink(path)
+        nohave = self.deck.db.list(
+            "select file from media where csum = ''")
+        self.deck.finishProgress()
+        return (nohave, unused)
+
+    # Download missing / convert remote links to local ones
+    ##########################################################################
+
+    def downloadMissing(self):
+        # (as before, but a method; walks the (file, csum) rows in the media
+        # table, fetching urlbase + file for anything missing on disk;
+        # body elided here)
+        ...
+
+    def downloadRemote(self):
+        # (as before, but a method; downloads remote references into the media
+        # dir, rewrites field values, then rebuilds the q/a cache via
+        # updateCardsFromModel(); body elided here)
+        ...
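A rough sketch of how the registry is meant to be driven from deck code, based on the methods above; `deck.media` as the attribute that holds the MediaRegistry instance, and `card.q`/`card.a` as the cached question/answer text, are assumptions rather than names shown in this hunk:

    # copy a file into the media dir; the stored (possibly renamed) name comes back
    name = deck.media.addFile(u"/tmp/elephant.jpg")

    # after a fact's q/a cache is rebuilt, register whatever media it references
    deck.media.registerText(card.q + card.a)

    # consistency check: files referenced but missing, and files on disk but unused
    nohave, unused = deck.media.rebuildMediaDir(delete=False)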
anki/models.py (349 lines changed)
@@ -2,9 +2,13 @@
 # Copyright: Damien Elmes <anki@ichi2.net>
 # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html

-import time, re, simplejson
-from sqlalchemy.ext.orderinglist import ordering_list
-from anki.db import *
+"""\
+Models load their templates and fields when they are loaded. If you update a
+template or field, you should call model.flush(), rather than trying to save
+the subobject directly.
+"""
+
+import time, re, simplejson, copy as copyMod
 from anki.utils import genID, canonifyTags, intTime
 from anki.fonts import toPlatformFont
 from anki.utils import parseTags, hexifyID, checksum, stripHTML, intTime

@@ -13,185 +17,176 @@ from anki.hooks import runFilter
 from anki.template import render
 from copy import copy

-# (removed SQLAlchemy layer; column lists and bodies elided here)
-def alignmentLabels(): ...
-fieldModelsTable = Table('fieldModels', metadata, ...)
-class FieldModel(object): ...
-mapper(FieldModel, fieldModelsTable)
-cardModelsTable = Table('cardModels', metadata, ...)
-class CardModel(object): ...
-mapper(CardModel, cardModelsTable)
-def formatQA(cid, mid, fact, tags, cm, deck): ...
-modelsTable = Table('models', metadata, ...)
+# Models
+##########################################################################
+
+defaultConf = {
+}

 class Model(object):
-    "Defines the way a fact behaves, what fields it can contain, etc."
-    def __init__(self, name=u""): ...
-    def setModified(self): ...
-    def addFieldModel(self, field): ...
-    def addCardModel(self, card): ...
-mapper(Model, modelsTable, properties={...})
+
+    def __init__(self, deck, id=None):
+        self.deck = deck
+        if id:
+            self.id = id
+            self.load()
+        else:
+            self.id = genID()
+            self.name = u""
+            self.mod = intTime()
+            self.conf = defaultConf.copy()
+            self.fields = []
+            self.templates = []
+
+    def load(self):
+        (self.mod,
+         self.name,
+         self.conf) = self.deck.db.first("""
+select mod, name, conf from models where id = ?""", self.id)
+        self.conf = simplejson.loads(self.conf)
+        self.loadFields()
+        self.loadTemplates()
+
+    def flush(self):
+        self.mod = intTime()
+        self.deck.db.execute("""
+insert or replace into models values (?, ?, ?, ?)""",
+            self.id, self.mod, self.name,
+            simplejson.dumps(self.conf))
+        [f._flush() for f in self.fields]
+        [t._flush() for t in self.templates]
+
+    def updateCache(self):
+        self.deck.updateCache([self.id], "model")
+
+    # Fields
+    ##################################################
+
+    def loadFields(self):
+        sql = "select * from fields where mid = ? order by ord"
+        self.fields = [Field(self.deck, data)
+                       for data in self.deck.db.all(sql, self.id)]
+
+    def addField(self, field):
+        self.deck.modSchema()
+        field.mid = self.id
+        field.ord = len(self.fields)
+        self.fields.append(field)
+
+    def fieldMap(self):
+        "Mapping of field name -> (fmid, ord)."
+        return dict([(f.name, (f.id, f.ord, f.conf)) for f in self.fields])
+
+    # Templates
+    ##################################################
+
+    def loadTemplates(self):
+        sql = "select * from templates where mid = ? order by ord"
+        self.templates = [Template(self.deck, data)
+                          for data in self.deck.db.all(sql, self.id)]
+
+    def addTemplate(self, template):
+        self.deck.modSchema()
+        template.mid = self.id
+        template.ord = len(self.templates)
+        self.templates.append(template)
+
+    # Copying
+    ##################################################
+
+    def copy(self):
+        "Copy, flush and return."
+        new = Model(self.deck, self.id)
+        new.id = genID()
+        new.name += _(" copy")
+        for f in new.fields:
+            f.id = genID()
+            f.mid = new.id
+        for t in new.templates:
+            t.id = genID()
+            t.mid = new.id
+        new.flush()
+        return new
+
+# Field model object
+##########################################################################
+
+defaultFieldConf = {
+    'rtl': False, # features
+    'required': False,
+    'unique': False,
+    'font': "Arial",
+    'editSize': 20,
+    'quizSize': 20,
+    'quizColour': "#fff",
+    'pre': True,
+}
+
+class Field(object):
+
+    def __init__(self, deck, data=None):
+        self.deck = deck
+        if data:
+            self.initFromData(data)
+        else:
+            self.id = genID()
+            self.numeric = 0
+            self.conf = defaultFieldConf.copy()
+
+    def initFromData(self, data):
+        (self.id,
+         self.mid,
+         self.ord,
+         self.name,
+         self.numeric,
+         self.conf) = data
+        self.conf = simplejson.loads(self.conf)
+
+    def _flush(self):
+        self.deck.db.execute("""
+insert or replace into fields values (?, ?, ?, ?, ?, ?)""",
+            self.id, self.mid, self.ord,
+            self.name, self.numeric,
+            simplejson.dumps(self.conf))
+
+# Template object
+##########################################################################
+
+# FIXME: change typeAnswer to field id
+
+defaultTemplateConf = {
+    'hideQ': False,
+    'align': 0,
+    'bg': "#000",
+    'allowEmptyAns': None,
+    'typeAnswer': None,
+}
+
+class Template(object):
+
+    def __init__(self, deck, data=None):
+        self.deck = deck
+        if data:
+            self.initFromData(data)
+        else:
+            self.id = genID()
+            self.active = True
+            self.conf = defaultTemplateConf.copy()
+
+    def initFromData(self, data):
+        (self.id,
+         self.mid,
+         self.ord,
+         self.name,
+         self.active,
+         self.qfmt,
+         self.afmt,
+         self.conf) = data
+        self.conf = simplejson.loads(self.conf)
+
+    def _flush(self):
+        self.deck.db.execute("""
+insert or replace into templates values (?, ?, ?, ?, ?, ?, ?, ?)""",
+            self.id, self.mid, self.ord, self.name,
+            self.active, self.qfmt, self.afmt,
+            simplejson.dumps(self.conf))
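A sketch of building a model under the new scheme, mirroring what stdmodels.py does further below; it assumes an already-open deck object, and the field/template names are illustrative:

    m = Model(deck)                # no id given: starts empty with default conf
    m.name = u"Example"
    f = Field(deck)
    f.name = u"Front"
    f.conf['required'] = True
    m.addField(f)
    t = Template(deck)
    t.name = u"Forward"
    t.qfmt = "{{Front}}"
    t.afmt = "{{Front}}"
    m.addTemplate(t)
    m.flush()                      # writes the model plus its fields and templates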
@@ -3,7 +3,6 @@
 # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html

 import time
-from anki.db import *

 # Flags: 0=standard review, 1=reschedule due to cram, drill, etc
 # Rep: Repetition number. The same number may appear twice if a card has been

@@ -12,20 +11,8 @@ from anki.db import *
 # We store the times in integer milliseconds to avoid an extra index on the
 # primary key.

-revlogTable = Table(
-    'revlog', metadata,
-    Column('time', Integer, nullable=False, primary_key=True),
-    Column('cardId', Integer, nullable=False),
-    Column('ease', Integer, nullable=False),
-    Column('rep', Integer, nullable=False),
-    Column('lastInterval', Integer, nullable=False),
-    Column('interval', Integer, nullable=False),
-    Column('factor', Integer, nullable=False),
-    Column('userTime', Integer, nullable=False),
-    Column('flags', Integer, nullable=False, default=0))
-
 def logReview(db, card, ease, flags=0):
-    db.statement("""
+    db.execute("""
 insert into revlog values (
 :created, :cardId, :ease, :rep, :lastInterval, :interval, :factor,
 :userTime, :flags)""",
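Both calling styles of the reworked DB wrapper appear in this commit: qmark placeholders with positional arguments (media.py above) and named placeholders with keyword arguments (logReview here). A hedged sketch with illustrative values; the bare variables are placeholders, not real attribute names:

    # positional style, as in MediaRegistry.registerFile()
    deck.db.execute("insert or replace into media values (?, ?, ?)",
                    u"elephant.jpg", intTime(), csum)

    # named style, as in logReview(); times are integer milliseconds
    db.execute("insert into revlog values (:created, :cardId, :ease, :rep, "
               ":lastInterval, :interval, :factor, :userTime, :flags)",
               created=intTime()*1000, cardId=cid, ease=3, rep=rep,
               lastInterval=lastIvl, interval=ivl, factor=factor,
               userTime=taken, flags=0)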
@@ -5,8 +5,7 @@
 import time, datetime, simplejson, random
 from operator import itemgetter
 from heapq import *
-from anki.db import *
-from anki.cards import Card
+#from anki.cards import Card
 from anki.utils import parseTags, ids2str
 from anki.lang import _
 from anki.consts import *

@@ -26,12 +25,10 @@ class Scheduler(object):
         self.checkDay()
         id = self.getCardId()
         if id:
-            card = Card()
-            assert card.fromDB(self.db, id)
-            return card
+            return self.deck.getCard(id)

     def reset(self):
-        self.resetConfig()
+        self.resetConf()
         t = time.time()
         self.resetLearn()
         print "lrn %0.2fms" % ((time.time() - t)*1000); t = time.time()

@@ -53,7 +50,7 @@ class Scheduler(object):
             self.answerLearnCard(card, ease)
         else:
             raise Exception("Invalid queue")
-        card.toDB(self.db)
+        card.flushSched()

     def counts(self):
         # FIXME: should learn count include new cards due today, or be separate?

@@ -113,7 +110,7 @@ queue = 2 %s order by due limit %d""" % (self.newOrder(), self.groupLimit('new')
         return self.newQueue.pop()[0]

     def newOrder(self):
-        return (",ordinal", "")[self.deck.qconf['newTodayOrder']]
+        return (",ord", "")[self.deck.qconf['newTodayOrder']]

     def updateNewCardRatio(self):
         if self.deck.qconf['newCardSpacing'] == NEW_CARDS_DISTRIBUTE:

@@ -172,7 +169,7 @@ limit %d""" % self.learnLimit, lim=self.dayCutoff)
         card.due = time.time() + conf['delays'][card.grade]*60

     def learnConf(self, card):
-        conf = self.configForCard(card)
+        conf = self.confForCard(card)
         if card.type == 2:
             return conf['new']
         else:

@@ -287,7 +284,7 @@ queue = 1 %s and due < :lim order by %s limit %d""" % (
         self.answerPreSave(card, ease)
         # save
         card.due = card.due
-        card.toDB(self.db)
+        card.saveSched()
         # review history
         print "make sure flags is set correctly when reviewing early"
         logReview(self.db, card, ease, 0)

@@ -309,11 +306,10 @@ queue = 1 %s and due < :lim order by %s limit %d""" % (
         card.successive += 1
         # if not card.firstAnswered:
         #     card.firstAnswered = time.time()
-        card.setModified()

     def spaceCards(self, card):
         new = time.time() + self.newSpacing
-        self.db.statement("""
+        self.db.execute("""
 update cards set
 due = (case
 when queue = 1 then due + 86400 * (case

@@ -323,13 +319,13 @@ when queue = 1 then due + 86400 * (case
 when queue = 2 then :new
 end),
 modified = :now
-where id != :id and factId = :factId
+where id != :id and fid = :fid
 and due < :cut
 and queue between 1 and 2""",
-            id=card.id, now=time.time(), factId=card.factId,
+            id=card.id, now=time.time(), fid=card.fid,
             cut=self.dayCutoff, new=new, rev=self.revSpacing)
         # update local cache of seen facts
-        self.spacedFacts[card.factId] = new
+        self.spacedFacts[card.fid] = new

     # Interval management
     ##########################################################################

@@ -444,39 +440,36 @@ and queue between 1 and 2""",
         (fmax - no) % (max(fmax/2, 1)) == 0)

     def handleLeech(self, card):
-        self.refreshSession()
         scard = self.cardFromId(card.id, True)
         tags = scard.fact.tags
         tags = addTags("Leech", tags)
         scard.fact.tags = canonifyTags(tags)
         scard.fact.setModified(textChanged=True, deck=self)
         self.updateFactTags([scard.fact.id])
-        self.db.flush()
         self.db.expunge(scard)
         if self.getBool('suspendLeeches'):
             self.suspendCards([card.id])
         self.reset()
-        self.refreshSession()

     # Tools
     ##########################################################################

-    def resetConfig(self):
-        "Update group config cache."
-        self.groupConfigs = dict(self.db.all("select id, confId from groups"))
-        self.configCache = {}
+    def resetConf(self):
+        "Update group conf cache."
+        self.groupConfs = dict(self.db.all("select id, gcid from groups"))
+        self.confCache = {}

-    def configForCard(self, card):
-        id = self.groupConfigs[card.groupId]
-        if id not in self.configCache:
-            self.configCache[id] = simplejson.loads(
-                self.db.scalar("select config from groupConfig where id = :id",
-                               id=id))
-        return self.configCache[id]
+    def confForCard(self, card):
+        id = self.groupConfs[card.gid]
+        if id not in self.confCache:
+            self.confCache[id] = simplejson.loads(
+                self.db.scalar("select conf from gconf where id = :id",
+                               id=id))
+        return self.confCache[id]

     def resetSchedBuried(self):
         "Put temporarily suspended cards back into play."
-        self.db.statement(
+        self.db.execute(
             "update cards set queue = type where queue = -3")

     def groupLimit(self, type):

@@ -484,7 +477,7 @@ and queue between 1 and 2""",
         if not l:
             # everything
             return ""
-        return " and groupId in %s" % ids2str(l)
+        return " and gid in %s" % ids2str(l)

     # Daily cutoff
     ##########################################################################

@@ -538,7 +531,7 @@ select count() from cards c where queue = 1 and due > :now
         self.revQueue = self.db.all(
             self.cardLimit(
                 "revActive", "revInactive", """
-select id, factId from cards c where queue = 1 and due > :lim
+select id, fid from cards c where queue = 1 and due > :lim
 order by due limit %d""" % self.queueLimit), lim=self.dayCutoff)
         self.revQueue.reverse()
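Putting the scheduler changes together, a review loop against the refactored API looks roughly like this; `showCard()` and the fixed ease value are placeholders, and `card.q`/`card.a` as the cached question/answer columns are assumed from the schema used above:

    while True:
        card = deck.sched.getCard()      # now returns a Card object, or None
        if not card:
            break
        showCard(card.q, card.a)         # placeholder for the actual UI
        deck.sched.answerCard(card, 3)   # flushes the card's scheduling state
    deck.save()                          # explicit save, as in storage.py below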
@@ -4,7 +4,6 @@

 import time, sys, os, datetime
 import anki, anki.utils
-from anki.db import *
 from anki.lang import _, ngettext
 from anki.utils import canonifyTags, ids2str
 from anki.hooks import runFilter
@@ -2,48 +2,54 @@
 # Copyright: Damien Elmes <anki@ichi2.net>
 # License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html

-"""\
-Standard Models.
-==============================================================
-
-Plugins can add to the 'models' dict to provide more standard
-models.
-"""
-
-from anki.models import Model, CardModel, FieldModel
+from anki.models import Model, Template, Field
 from anki.lang import _

-models = {}
-
-def byName(name):
-    fn = models.get(name)
-    if fn:
-        return fn()
-    raise ValueError("No such model available!")
-
-def names():
-    return models.keys()
+models = []

 # Basic
 ##########################################################################

-def BasicModel():
-    m = Model(_('Basic'))
-    m.addFieldModel(FieldModel(u'Front', True, True))
-    m.addFieldModel(FieldModel(u'Back', False, False))
-    m.addCardModel(CardModel(u'Forward', u'%(Front)s', u'%(Back)s'))
-    m.addCardModel(CardModel(u'Reverse', u'%(Back)s', u'%(Front)s',
-                             active=False))
+def BasicModel(deck):
+    m = Model(deck)
+    m.name = _("Basic")
+    fm = Field(deck)
+    fm.name = _("Front")
+    fm.conf['required'] = True
+    fm.conf['unique'] = True
+    m.addField(fm)
+    fm = Field(deck)
+    fm.name = _("Back")
+    m.addField(fm)
+    t = Template(deck)
+    t.name = _("Forward")
+    t.qfmt = "{{" + _("Front") + "}}"
+    t.afmt = "{{" + _("Back") + "}}"
+    m.addTemplate(t)
+    t = Template(deck)
+    t.name = _("Reverse")
+    t.qfmt = "{{" + _("Back") + "}}"
+    t.afmt = "{{" + _("Front") + "}}"
+    t.active = False
+    m.addTemplate(t)
     return m

-models['Basic'] = BasicModel
+models.append(BasicModel)

 # Recovery
 ##########################################################################

 def RecoveryModel():
-    m = Model(_('Recovery'))
-    m.addFieldModel(FieldModel(u'Question', False, False))
-    m.addFieldModel(FieldModel(u'Answer', False, False))
-    m.addCardModel(CardModel(u'Single', u'{{{Question}}}', u'{{{Answer}}}'))
+    m.name = _("Recovery")
+    fm = Field(deck)
+    fm.name = _("Question")
+    m.addField(fm)
+    fm = Field(deck)
+    fm.name = _("Back")
+    m.addField(fm)
+    t = Template(deck)
+    t.name = _("Forward")
+    t.qfmt = "{{" + _("Question") + "}}"
+    t.afmt = "{{" + _("Back") + "}}"
+    m.addTemplate(t)
     return m
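Since `models` is now a plain list of factory functions that each take a deck, an add-on that wants to ship another standard model can simply append its own factory; a sketch under that assumption (the model itself is hypothetical):

    from anki.stdmodels import models
    from anki.models import Model, Field, Template

    def MinimalModel(deck):
        m = Model(deck)
        m.name = u"Minimal"
        f = Field(deck)
        f.name = u"Text"
        m.addField(f)
        t = Template(deck)
        t.name = u"Card 1"
        t.qfmt = "{{Text}}"
        t.afmt = "{{Text}}"
        m.addTemplate(t)
        return m

    models.append(MinimalModel)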
@@ -4,11 +4,47 @@

 DECK_VERSION = 100

-import time, simplejson
-from anki.db import *
+import os, time, simplejson
 from anki.lang import _
-from anki.media import rebuildMediaDir
+#from anki.media import rebuildMediaDir
 from anki.utils import intTime
+from anki.db import DB
+from anki.deck import _Deck
+import anki.groups
+from anki.stdmodels import BasicModel
+
+def Deck(path, queue=True):
+    "Open a new or existing deck. Path must be unicode."
+    path = os.path.abspath(path)
+    create = not os.path.exists(path)
+    # connect
+    db = DB(path)
+    if create:
+        ver = _createDB(db)
+    else:
+        ver = _upgradeSchema(db)
+    db.execute("pragma cache_size = 20000")
+    # add db to deck and do any remaining upgrades
+    deck = _Deck(db)
+    if ver < DECK_VERSION:
+        _upgradeDeck(deck, ver)
+    elif create:
+        deck.addModel(BasicModel(deck))
+        deck.save()
+    if not queue:
+        return deck
+    # rebuild queue
+    deck.reset()
+    return deck
+
+def _createDB(db):
+    db.execute("pragma page_size = 4096")
+    db.execute("pragma legacy_file_format = 0")
+    db.execute("vacuum")
+    _addSchema(db)
+    _updateIndices(db)
+    db.execute("analyze")
+    return DECK_VERSION

 def moveTable(s, table):
     sql = s.scalar(
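From calling code, the new open path boils down to a couple of lines; `sched.counts()` is taken from the scheduler changes above, the rest is a sketch:

    import anki

    deck = anki.Deck(u"/path/to/my.anki")   # creates and seeds the file if missing
    print deck.sched.counts()               # usable immediately: Deck() calls reset()
    deck.save()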
@@ -18,19 +54,16 @@ def moveTable(s, table):
     s.execute("insert into %s2 select * from %s" % (table, table))
     s.execute("drop table "+table)

-def upgradeSchema(engine, s):
+def _upgradeSchema(db):
     "Alter tables prior to ORM initialization."
     try:
-        ver = s.scalar("select version from deck limit 1")
+        ver = db.scalar("select version from deck")
     except:
-        ver = s.scalar("select version from decks limit 1")
+        ver = db.scalar("select version from decks")
     if ver < 65:
         raise Exception("oldDeckVersion")
     if ver < 99:
-        # fields
-        ###########
-        s.execute(
-            "alter table fields add column chksum text not null default ''")
+        raise "upgrade"
     # cards
     ###########
     moveTable(s, "cards")

@@ -83,9 +116,20 @@ cast(modified as int), tags, cache from facts2""")
 insert or ignore into media select id, filename, size, cast(created as int),
 originalPath from media2""")
     s.execute("drop table media2")
-    # deck
+    # longer migrations
     ###########
     migrateDeck(s, engine)
+    migrateFields(s, engine)
+    # # fields
+    # ###########
+    # db.execute(
+    #     "alter table fields add column csum text not null default ''")

     # models
     ###########
     moveTable(s, "models")

@@ -141,91 +185,64 @@ utcOffset, "", "", "" from decks""")
     s.execute("drop table decks")
     s.execute("drop table deckVars")

-def updateIndices(db):
-    "Add indices to the DB."
-    # (create-index statements for cards.modified, facts.modified,
-    # cards.factId, fields.factId, fields.chksum, media.chksum and
-    # gravestones.delTime; bodies elided here)
-
-def upgradeDeck(deck):
+def _upgradeDeck(deck, version):
     "Upgrade deck to the latest version."
-    if deck.version < DECK_VERSION:
+    print version, DECK_VERSION
+    if version < DECK_VERSION:
         prog = True
         deck.startProgress()
         deck.updateProgress(_("Upgrading Deck..."))
         oldmod = deck.modified
     else:
         prog = False
-    if deck.version < 100:
+    if version < 100:
         # update dynamic indices given we don't use priority anymore
         for d in ("intervalDesc", "intervalAsc", "randomOrder",
                   "dueAsc", "dueDesc"):
-            deck.db.statement("drop index if exists ix_cards_%s2" % d)
-            deck.db.statement("drop index if exists ix_cards_%s" % d)
+            deck.db.execute("drop index if exists ix_cards_%s2" % d)
+            execute.db.statement("drop index if exists ix_cards_%s" % d)
        # remove old views
         for v in ("failedCards", "revCardsOld", "revCardsNew",
                   "revCardsDue", "revCardsRandom", "acqCardsRandom",
                   "acqCardsOld", "acqCardsNew"):
-            deck.db.statement("drop view if exists %s" % v)
-        # remove the expensive value cache
-        deck.db.statement("drop index if exists ix_fields_value")
        # add checksums and index
         deck.updateAllFieldChecksums()
        # this was only used for calculating average factor
-        deck.db.statement("drop index if exists ix_cards_factor")
+        deck.db.execute("drop index if exists ix_cards_factor")
        # remove stats, as it's all in the revlog now
-        deck.db.statement("drop table if exists stats")
+        deck.db.execute("drop table if exists stats")
        # migrate revlog data to new table
-        deck.db.statement("""
+        deck.db.execute("""
 insert or ignore into revlog select
 cast(time*1000 as int), cardId, ease, reps,
 cast(lastInterval as int), cast(nextInterval as int),
 cast(nextFactor*1000 as int), cast(min(thinkingTime, 60)*1000 as int),
 0 from reviewHistory""")
-        deck.db.statement("drop table reviewHistory")
+        deck.db.execute("drop table reviewHistory")
        # convert old ease0 into ease1
-        deck.db.statement("update revlog set ease = 1 where ease = 0")
+        deck.db.execute("update revlog set ease = 1 where ease = 0")
        # remove priority index
-        deck.db.statement("drop index if exists ix_cards_priority")
+        deck.db.execute("drop index if exists ix_cards_priority")
        # suspended cards don't use ranges anymore
         deck.db.execute("update cards set queue=-1 where queue between -3 and -1")
         deck.db.execute("update cards set queue=-2 where queue between 3 and 5")
         deck.db.execute("update cards set queue=-3 where queue between 6 and 8")
-        # don't need an index on fieldModelId
-        deck.db.statement("drop index if exists ix_fields_fieldModelId")
        # update schema time
-        deck.db.statement("update deck set schemaMod = :t", t=intTime())
+        deck.db.execute("update deck set schemaMod = :t", t=intTime())
        # remove queueDue as it's become dynamic, and type index
-        deck.db.statement("drop index if exists ix_cards_queueDue")
-        deck.db.statement("drop index if exists ix_cards_type")
+        deck.db.execute("drop index if exists ix_cards_queueDue")
+        deck.db.execute("drop index if exists ix_cards_type")
        # remove old deleted tables
         for t in ("cards", "facts", "models", "media"):
-            deck.db.statement("drop table if exists %sDeleted" % t)
+            deck.db.execute("drop table if exists %sDeleted" % t)
        # finally, update indices & optimize
         updateIndices(deck.db)
        # rewrite due times for new cards
-        deck.db.statement("""
+        deck.db.execute("""
 update cards set due = (select pos from facts where factId = facts.id) where type=2""")
        # convert due cards into day-based due
-        deck.db.statement("""
+        deck.db.execute("""
 update cards set due = cast(
 (case when due < :stamp then 0 else 1 end) +
 ((due-:stamp)/86400) as int)+:today where type

@@ -240,14 +257,170 @@ between 0 and 1""", stamp=deck.sched.dayCutoff, today=deck.sched.today)
     deck.config['nextFactPos'] = deck.db.scalar("select max(pos) from facts")+1
     deck.flushConfig()
     # add default config
|
# add default config
|
||||||
import deck as deckMod
|
|
||||||
deckMod.DeckStorage._addConfig(deck.engine)
|
|
||||||
|
|
||||||
deck.updateDynamicIndices()
|
deck.updateDynamicIndices()
|
||||||
deck.db.execute("vacuum")
|
deck.db.execute("vacuum")
|
||||||
deck.db.execute("analyze")
|
deck.db.execute("analyze")
|
||||||
deck.version = 100
|
deck.db.execute("update deck set version = ?", DECK_VERSION)
|
||||||
deck.db.commit()
|
deck.db.commit()
|
||||||
if prog:
|
if prog:
|
||||||
assert deck.modified == oldmod
|
assert deck.modified == oldmod
|
||||||
deck.finishProgress()
|
deck.finishProgress()
|
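The day-based due conversion in the hunk above is easier to follow as plain arithmetic. A rough Python equivalent, for illustration only and not code from this commit; stamp is the scheduler's day cutoff and today its current day number:

def dayBasedDue(due, stamp, today):
    # Cards whose absolute due time is already past the cutoff land on today's
    # day number; later cards are pushed out by whole 86400-second days.
    bump = 0 if due < stamp else 1
    days = int((due - stamp) / 86400.0)  # truncate toward zero, as SQLite does
    return bump + days + today

For example, a card due one hour after the cutoff maps to today + 1.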
+def _addSchema(db):
+    db.executescript("""
+create table if not exists deck (
+    id integer primary key,
+    created integer not null,
+    mod integer not null,
+    schema integer not null,
+    version integer not null,
+    syncName text not null,
+    lastSync integer not null,
+    utcOffset integer not null,
+    qconf text not null,
+    conf text not null,
+    data text not null
+);
+
+create table if not exists cards (
+    id integer primary key,
+    fid integer not null,
+    tid integer not null,
+    gid integer not null,
+    mod integer not null,
+    q text not null,
+    a text not null,
+    ord integer not null,
+    type integer not null,
+    queue integer not null,
+    due integer not null,
+    interval integer not null,
+    factor integer not null,
+    reps integer not null,
+    streak integer not null,
+    lapses integer not null,
+    grade integer not null,
+    cycles integer not null
+);
+
+create table if not exists facts (
+    id integer primary key,
+    mid integer not null,
+    mod integer not null,
+    pos integer not null,
+    tags text not null,
+    cache text not null
+);
+
+create table if not exists models (
+    id integer primary key,
+    mod integer not null,
+    name text not null,
+    conf text not null
+);
+
+create table if not exists fields (
+    id integer primary key,
+    mid integer not null,
+    ord integer not null,
+    name text not null,
+    numeric integer not null,
+    conf text not null
+);
+
+create table if not exists templates (
+    id integer primary key,
+    mid integer not null,
+    ord integer not null,
+    name text not null,
+    active integer not null,
+    qfmt text not null,
+    afmt text not null,
+    conf text not null
+);
+
+create table if not exists fdata (
+    fid integer not null,
+    fmid integer not null,
+    ord integer not null,
+    val text not null,
+    csum text not null
+);
+
+create table if not exists gravestones (
+    delTime integer not null,
+    objectId integer not null,
+    type integer not null
+);
+
+create table if not exists gconf (
+    id integer primary key,
+    mod integer not null,
+    name text not null,
+    conf text not null
+);
+
+create table if not exists groups (
+    id integer primary key autoincrement,
+    mod integer not null,
+    name text not null,
+    gcid integer not null
+);
+
+create table if not exists media (
+    file text primary key,
+    mod integer not null,
+    csum text not null
+);
+
+create table if not exists revlog (
+    time integer primary key,
+    cid integer not null,
+    ease integer not null,
+    rep integer not null,
+    lastInt integer not null,
+    interval integer not null,
+    factor integer not null,
+    userTime integer not null,
+    flags integer not null
+);
+
+create table if not exists tags (
+    id integer primary key,
+    mod integer not null,
+    name text not null collate nocase unique
+);
+
+insert or ignore into deck
+values(1,%(t)s,%(t)s,%(t)s,%(v)s,'',0,-2,'', '', '');
+""" % ({'t': intTime(), 'v':DECK_VERSION}))
+    import anki.deck
+    db.execute("update deck set qconf = ?, conf = ?, data = ?",
+               simplejson.dumps(anki.deck.defaultQconf),
+               simplejson.dumps(anki.deck.defaultConf),
+               "{}")
+    db.execute(
+        "insert or ignore into gconf values (1, ?, ?, ?)""",
+        intTime(), _("Default Config"),
+        simplejson.dumps(anki.groups.defaultConf))
+    db.execute(
+        "insert or ignore into groups values (1, ?, ?, 1)",
+        intTime(), _("Default Group"))
+
+def _updateIndices(db):
+    "Add indices to the DB."
+    db.executescript("""
+-- sync summaries
+create index if not exists ix_cards_mod on cards (mod);
+create index if not exists ix_facts_mod on facts (mod);
+-- card spacing
+create index if not exists ix_cards_fid on cards (fid);
+-- fact data
+create index if not exists ix_fdata_fid on fdata (fid);
+create index if not exists ix_fdata_csum on fdata (csum);
+-- media
+create index if not exists ix_media_csum on media (csum);
+-- deletion tracking
+create index if not exists ix_gravestones_delTime on gravestones (delTime);
+""")
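A minimal sketch, not part of the commit, of driving a schema and index script like the one above straight through sqlite3. The tables are cut down to a few columns and DECK_VERSION is assumed to be 100 here; the point is only the executescript/execute pattern:

import sqlite3, time

DECK_VERSION = 100  # assumed value for this sketch

def createDB(path=":memory:"):
    # create a throwaway deck-style database and seed the single deck row
    db = sqlite3.connect(path)
    db.executescript("""
create table if not exists deck (
    id integer primary key,
    created integer not null,
    mod integer not null,
    schema integer not null,
    version integer not null,
    syncName text not null,
    lastSync integer not null,
    utcOffset integer not null,
    qconf text not null,
    conf text not null,
    data text not null
);
create table if not exists cards (
    id integer primary key,
    fid integer not null,
    mod integer not null
);
create index if not exists ix_cards_mod on cards (mod);
create index if not exists ix_cards_fid on cards (fid);
""")
    t = int(time.time())
    db.execute("insert or ignore into deck values (1,?,?,?,?,'',0,-2,'','','')",
               (t, t, t, DECK_VERSION))
    db.commit()
    return db

db = createDB()
print db.execute("select version, utcOffset from deck").fetchone()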
27 anki/sync.py
@@ -6,13 +6,12 @@ import zlib, re, urllib, urllib2, socket, simplejson, time, shutil
 import os, base64, httplib, sys, tempfile, httplib, types
 from datetime import date
 import anki, anki.deck, anki.cards
-from anki.db import sqlite
 from anki.errors import *
-from anki.models import Model, FieldModel, CardModel
+#from anki.models import Model, Field, Template
-from anki.facts import Fact, Field
+#from anki.facts import Fact
-from anki.cards import Card
+#from anki.cards import Card
 from anki.utils import ids2str, hexifyID, checksum
-from anki.media import mediaFiles
+#from anki.media import mediaFiles
 from anki.lang import _
 from hooks import runHook

@@ -334,7 +333,7 @@ class SyncTools(object):
 self.applyDict(local, model)
 self.mergeFieldModels(local, fms)
 self.mergeCardModels(local, cms)
-self.deck.db.statement(
+self.deck.db.execute(
 "delete from modelsDeleted where modelId in %s" %
 ids2str([m['id'] for m in models]))

@@ -457,7 +456,7 @@ insert into fields
 (id, factId, fieldModelId, ordinal, value, chksum)
 values
 (:id, :factId, :fieldModelId, :ordinal, :value, :chksum)""", dlist)
-self.deck.db.statement(
+self.deck.db.execute(
 "delete from factsDeleted where factId in %s" %
 ids2str([f[0] for f in facts]))

@@ -535,7 +534,7 @@ values
 :matureEase1, :matureEase2, :matureEase3, :matureEase4, :yesCount,
 :noCount, :question, :answer, :lastFactor, :spaceUntil,
 :type, :combinedDue, :rd, 0)""", dlist)
-self.deck.db.statement(
+self.deck.db.execute(
 "delete from cardsDeleted where cardId in %s" %
 ids2str([c[0] for c in cards]))

@@ -569,7 +568,7 @@ values
 if 'meta' in deck:
 meta = deck['meta']
 for (k,v) in meta:
-self.deck.db.statement("""
+self.deck.db.execute("""
 insert or replace into deckVars
 (key, value) values (:k, :v)""", k=k, v=v)
 del deck['meta']

@@ -592,7 +591,7 @@ select * from revlog where time > :ls""",
 'flags': h[8]} for h in history]
 if not dlist:
 return
-self.deck.db.statements("""
+self.deck.db.execute("""
 insert or ignore into revlog values
 (:time, :cardId, :ease, :rep, :lastInterval, :interval, :factor,
 :userTime, :flags)""",

@@ -603,7 +602,7 @@ insert or ignore into revlog values

 def updateSources(self, sources):
 for s in sources:
-self.deck.db.statement("""
+self.deck.db.execute("""
 insert or replace into sources values
 (:id, :name, :created, :lastSync, :syncPeriod)""",
 id=s[0],

@@ -633,12 +632,12 @@ from media where id in %s""" % ids2str(ids))]
 'description': m[5]})
 # apply metadata
 if meta:
-self.deck.db.statements("""
+self.deck.db.execute("""
 insert or replace into media (id, filename, size, created,
 originalPath, description)
 values (:id, :filename, :size, :created, :originalPath,
 :description)""", meta)
-self.deck.db.statement(
+self.deck.db.execute(
 "delete from mediaDeleted where mediaId in %s" %
 ids2str([m[0] for m in media]))

@@ -646,7 +645,7 @@ values (:id, :filename, :size, :created, :originalPath,
 sids = ids2str(ids)
 files = self.deck.db.column0(
 "select filename from media where id in %s" % sids)
-self.deck.db.statement("""
+self.deck.db.execute("""
 insert into mediaDeleted
 select id, :now from media
 where media.id in %s""" % sids, now=time.time())
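The calls above pass keyword arguments straight into execute(), e.g. execute(sql, k=k, v=v). A hypothetical stand-in for that wrapper, not the real anki.db module, only needs to forward keywords as sqlite3 named parameters:

import sqlite3

class ThinDB(object):
    "Hypothetical stand-in for the DB wrapper used above; not the real anki.db."
    def __init__(self, path):
        self._db = sqlite3.connect(path)
    def execute(self, sql, *args, **kwargs):
        # keyword args feed :name placeholders, positional args feed ? placeholders
        return self._db.execute(sql, kwargs or args)
    def scalar(self, sql, *args, **kwargs):
        row = self.execute(sql, *args, **kwargs).fetchone()
        return row[0] if row else None

db = ThinDB(":memory:")
db.execute("create table deckVars (key text primary key, value text)")
db.execute("insert or replace into deckVars (key, value) values (:k, :v)",
           k="example", v="value")
assert db.scalar("select value from deckVars where key = :k", k="example") == "value"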
@@ -11,7 +11,6 @@ except ImportError:
 import md5
 md5 = md5.new

-from anki.db import *
 from anki.lang import _, ngettext
 import locale, sys

@@ -151,7 +150,7 @@ def tidyHTML(html):
 "margin-right:\d+px;(?: -qt-block-indent:0; "
 "text-indent:0px;)?", u"", html)
 html = re.sub(u"-qt-paragraph-type:empty;", u"", html)
-# strip leading space in style statements, and remove if no contents
+# strip leading space in style execute, and remove if no contents
 html = re.sub(u'style=" ', u'style="', html)
 html = re.sub(u' style=""', u"", html)
 # convert P tags into SPAN and/or BR

@@ -246,7 +245,7 @@ def canonifyTags(tags):
 tags = [t.lstrip(":") for t in set(parseTags(tags))]
 return joinTags(sorted(tags))

-def findTag(tag, tags):
+def hasTag(tag, tags):
 "True if TAG is in TAGS. Ignore case."
 return tag.lower() in [t.lower() for t in tags]

@@ -254,7 +253,7 @@ def addTags(addtags, tags):
 "Add tags if they don't exist."
 currentTags = parseTags(tags)
 for tag in parseTags(addtags):
-if not findTag(tag, currentTags):
+if not hasTag(tag, currentTags):
 currentTags.append(tag)
 return joinTags(currentTags)
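For reference, the renamed hasTag() and its addTags() caller behave like the sketch below; parseTags() and joinTags() are simplified stand-ins using plain space-separated tags rather than the real utils implementations:

def parseTags(tags):
    # simplified stand-in: split a space-separated tag string
    return [t for t in tags.split(" ") if t]

def joinTags(tags):
    return " ".join(tags)

def hasTag(tag, tags):
    "True if TAG is in TAGS. Ignore case."
    return tag.lower() in [t.lower() for t in tags]

def addTags(addtags, tags):
    "Add tags if they don't exist."
    currentTags = parseTags(tags)
    for tag in parseTags(addtags):
        if not hasTag(tag, currentTags):
            currentTags.append(tag)
    return joinTags(currentTags)

assert addTags("Verb verb", "Noun") == "Noun Verb"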
@@ -1 +0,0 @@
-abc
@ -1,50 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<vocabulary version="3">
|
|
||||||
<info><title>Deutsch - Italienisch</title><author>Rick Gruber-Riemer</author><notes>Klett Grund- und Aufbauwortschatz</notes><copyright></copyright><licence></licence><blabel>Deutsch</blabel><tlabel>Italienisch</tlabel><alabel>Attributes</alabel><ulabel>Lektion</ulabel><clabel>Kategori</clabel><olabel>Others</olabel><explabel>Erklärung</explabel><exlabel>Unregelmässig</exlabel><bor>en_US</bor><tor>en_US</tor><aor>en_US</aor><uor>en_US</uor><cor>en_US</cor><expor>en_US</expor><exor>en_US</exor><por>en_US</por><ror>en_US</ror><visa>1</visa><visu>0</visu><viscat>0</viscat><visexp>0</visexp><visex>2</visex><vispro>2</vispro><visrel>2</visrel><syllb>false</syllb><syllt>false</syllt></info>
|
|
||||||
<etattributes>
|
|
||||||
<eta eid="ETA4" n="Steigerung" di="ETAI10" lu="20061222 07:56:27 CET"><etai eid="ETAI10" ir="false">Regelmässig</etai><etai eid="ETAI11" ir="false">Unregelmässig</etai></eta>
|
|
||||||
<eta eid="ETA5" n="Perfekt" di="ETAI12" lu="20070210 13:20:09 CET"><etai eid="ETAI12" ir="false">avere</etai><etai eid="ETAI13" ir="false">essere</etai><etai eid="ETAI14" ir="false">avere oder essere</etai></eta>
|
|
||||||
<eta eid="ETA1" n="Konjugation" di="ETAI1" lu="20070211 12:35:19 CET"><etai eid="ETAI1" ir="false">-are (regelmässig)</etai><etai eid="ETAI16" ir="false">-ere (regelmässig)</etai><etai eid="ETAI15" ir="false">-ire (regelmässig)</etai><etai eid="ETAI2" ir="false">Unregelmässig</etai></eta>
|
|
||||||
<eta eid="ETA2" n="Geschlecht" di="ETAI3" lu="20070210 21:08:17 CET"><etai eid="ETAI3" ir="false">il</etai><etai eid="ETAI4" ir="false">la</etai></eta>
|
|
||||||
<eta eid="ETA3" n="Mehrzahl" di="ETAI6" lu="20070212 10:03:56 CET"><etai eid="ETAI6" ir="false">Regelmässig</etai><etai eid="ETAI7" ir="false">Unregelmässig</etai><etai eid="ETAI8" ir="false">Nur Einzahl</etai><etai eid="ETAI9" ir="false">Nur Mehrzahl</etai></eta>
|
|
||||||
</etattributes>
|
|
||||||
<entrytypes>
|
|
||||||
<entrytype eid="ET8" n="Sätze und Redewendungen" lu="20070310 20:16:30 CET"></entrytype>
|
|
||||||
<entrytype eid="ET7" n="Slang" lu="20070210 20:58:29 CET"></entrytype>
|
|
||||||
<entrytype eid="ET4" n="Adjektiv / Adverb" a1="ETA4" lu="20061222 07:58:14 CET"></entrytype>
|
|
||||||
<entrytype eid="ET3" n="Substantiv" a1="ETA2" a2="ETA3" lu="20061222 07:55:39 CET"></entrytype>
|
|
||||||
<entrytype eid="ET6" n="Modi di dire" lu="20070210 13:29:14 CET"></entrytype>
|
|
||||||
<entrytype eid="ET5" n="Konjugation" lu="20070210 13:20:36 CET"></entrytype>
|
|
||||||
<entrytype eid="ET2" n="Anderes" lu="20061222 07:52:31 CET"></entrytype>
|
|
||||||
<entrytype eid="ET1" n="Verb" a1="ETA1" a2="ETA5" lu="20061222 07:57:41 CET"></entrytype>
|
|
||||||
</entrytypes>
|
|
||||||
<units>
|
|
||||||
<unit eid="U4" lu="20070217 20:14:02 CET"><name>Rest</name><desc></desc></unit>
|
|
||||||
<unit eid="U3" lu="20070217 20:03:30 CET"><name>Harenberg Kalender Italienisch 2007</name><desc></desc></unit>
|
|
||||||
<unit eid="U5" lu="20070310 20:15:52 CET"><name>50. Restaurant, Café, Hotel</name><desc></desc></unit>
|
|
||||||
<unit eid="U2" lu="20070210 13:31:47 CET"><name>Berlitz Kalender 2005</name><desc></desc></unit>
|
|
||||||
<unit eid="U1" lu="20061222 07:48:58 CET"><name>A</name><desc></desc></unit>
|
|
||||||
</units>
|
|
||||||
<categories>
|
|
||||||
<category eid="C1" lu="20061222 07:46:40 CET"><name>Default</name><desc></desc></category>
|
|
||||||
</categories>
|
|
||||||
<entries>
|
|
||||||
<e et="ET1" eid="E113" u="U1" c="C1" lv="1" st="true" lu="20070211 14:18:49 CET" ll="19700101 01:00:00 CET" a1="ETAI1" a2="ETAI12"><o>entfernen, beseitigen</o><d>allontanare</d></e>
|
|
||||||
<e et="ET2" eid="E114" u="U1" c="C1" lv="2" st="true" lu="20070211 14:20:31 CET" ll="19700101 01:00:00 CET"><o>dann; damals, also; früher</o><d>allora</d></e>
|
|
||||||
<e et="ET3" eid="E112" u="U1" c="C1" lv="3" st="true" lu="20070211 14:17:19 CET" ll="19700101 01:00:00 CET" a1="ETAI3" a2="ETAI6"><o>Schüler, Zögling</o><d>allievo</d></e>
|
|
||||||
<e et="ET4" eid="E110" u="U1" c="C1" lv="4" st="true" lu="20070211 14:10:56 CET" ll="19700101 01:00:00 CET" a1="ETAI10"><o>lustig, heiter</o><d>allegro</d></e>
|
|
||||||
<e et="ET6" eid="E8" u="U2" c="C1" lv="5" st="true" lu="20070210 13:31:58 CET" ll="19700101 01:00:00 CET"><o>sich in einer unbequemen Situation befinden</o><d>essere un pesche four d' aqua</d></e>
|
|
||||||
<e et="ET7" eid="E49" u="U2" c="C1" lv="6" st="true" lu="20070210 20:59:34 CET" ll="19700101 01:00:00 CET"><o>das ist mir egal</o><d>me ne frego</d><ep>Geste: unter dem Kinn mit der Hand vonhinten nach vorne reiben</ep></e>
|
|
||||||
<e et="ET3" eid="E251" u="U5" c="C1" lv="7" st="true" lu="20070310 20:29:49 CET" ll="19700101 01:00:00 CET" a1="ETAI4" a2="ETAI6"><o>Wirtin</o><d>ostessa</d></e>
|
|
||||||
</entries>
|
|
||||||
<stats>
|
|
||||||
<sset ts="20070310 21:16:36 CET"><sne lv="1">236</sne><sne lv="2">19</sne><sne lv="3">1</sne><sne lv="4">5</sne><sne lv="5">3</sne><sne lv="6">1</sne><sne lv="7">2</sne></sset>
|
|
||||||
<sset ts="20070217 20:37:22 CET"><sne lv="1">196</sne><sne lv="2">19</sne><sne lv="3">1</sne><sne lv="4">5</sne><sne lv="5">3</sne><sne lv="6">1</sne><sne lv="7">2</sne></sset>
|
|
||||||
<sset ts="20070212 10:13:05 CET"><sne lv="1">125</sne><sne lv="2">12</sne><sne lv="3">5</sne><sne lv="4">1</sne><sne lv="5">1</sne><sne lv="6">0</sne><sne lv="7">0</sne></sset>
|
|
||||||
<sset ts="20070228 21:44:04 CET"><sne lv="1">202</sne><sne lv="2">19</sne><sne lv="3">1</sne><sne lv="4">5</sne><sne lv="5">3</sne><sne lv="6">1</sne><sne lv="7">2</sne></sset>
|
|
||||||
<sset ts="20070217 19:10:49 CET"><sne lv="1">188</sne><sne lv="2">12</sne><sne lv="3">5</sne><sne lv="4">1</sne><sne lv="5">1</sne><sne lv="6">0</sne><sne lv="7">0</sne></sset>
|
|
||||||
<sset ts="20070211 20:55:08 CET"><sne lv="1">124</sne><sne lv="2">18</sne><sne lv="3">1</sne><sne lv="4">1</sne><sne lv="5">0</sne><sne lv="6">0</sne><sne lv="7">0</sne></sset>
|
|
||||||
<sset ts="20070217 19:14:04 CET"><sne lv="1">176</sne><sne lv="2">19</sne><sne lv="3">1</sne><sne lv="4">5</sne><sne lv="5">3</sne><sne lv="6">1</sne><sne lv="7">2</sne></sset>
|
|
||||||
<sset ts="20070211 20:57:10 CET"><sne lv="1">124</sne><sne lv="2">18</sne><sne lv="3">1</sne><sne lv="4">1</sne><sne lv="5">0</sne><sne lv="6">0</sne><sne lv="7">0</sne></sset>
|
|
||||||
</stats>
|
|
||||||
</vocabulary>
|
|
||||||
|
|
@ -1,89 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<SuperMemoCollection>
|
|
||||||
<Count>3572</Count>
|
|
||||||
|
|
||||||
<SuperMemoElement>
|
|
||||||
<ID>1</ID>
|
|
||||||
|
|
||||||
<Type>Topic</Type>
|
|
||||||
|
|
||||||
<Content>
|
|
||||||
<Question />
|
|
||||||
|
|
||||||
<Answer />
|
|
||||||
</Content>
|
|
||||||
|
|
||||||
<SuperMemoElement>
|
|
||||||
<ID>40326</ID>
|
|
||||||
|
|
||||||
<Title>aoeu</Title>
|
|
||||||
|
|
||||||
<Type>Topic</Type>
|
|
||||||
|
|
||||||
<SuperMemoElement>
|
|
||||||
<ID>40327</ID>
|
|
||||||
|
|
||||||
<Title>1-400</Title>
|
|
||||||
|
|
||||||
<Type>Topic</Type>
|
|
||||||
|
|
||||||
<SuperMemoElement>
|
|
||||||
<ID>40615</ID>
|
|
||||||
|
|
||||||
<Title>aoeu</Title>
|
|
||||||
|
|
||||||
<Type>Topic</Type>
|
|
||||||
|
|
||||||
<SuperMemoElement>
|
|
||||||
<ID>10247</ID>
|
|
||||||
|
|
||||||
<Type>Item</Type>
|
|
||||||
|
|
||||||
<Content>
|
|
||||||
<Question>aoeu</Question>
|
|
||||||
|
|
||||||
<Answer>aoeu</Answer>
|
|
||||||
</Content>
|
|
||||||
|
|
||||||
<LearningData>
|
|
||||||
<Interval>1844</Interval>
|
|
||||||
|
|
||||||
<Repetitions>7</Repetitions>
|
|
||||||
|
|
||||||
<Lapses>0</Lapses>
|
|
||||||
|
|
||||||
<LastRepetition>19.09.2002</LastRepetition>
|
|
||||||
|
|
||||||
<AFactor>5,701</AFactor>
|
|
||||||
|
|
||||||
<UFactor>2,452</UFactor>
|
|
||||||
</LearningData>
|
|
||||||
</SuperMemoElement>
|
|
||||||
|
|
||||||
</SuperMemoElement>
|
|
||||||
|
|
||||||
<Type>Topic</Type>
|
|
||||||
|
|
||||||
<Content>
|
|
||||||
<Question>aoeu</Question>
|
|
||||||
<Answer />
|
|
||||||
</Content>
|
|
||||||
|
|
||||||
<LearningData>
|
|
||||||
<Interval>0</Interval>
|
|
||||||
|
|
||||||
<Repetitions>0</Repetitions>
|
|
||||||
|
|
||||||
<Lapses>0</Lapses>
|
|
||||||
|
|
||||||
<LastRepetition>04.08.2000</LastRepetition>
|
|
||||||
|
|
||||||
<AFactor>3,000</AFactor>
|
|
||||||
|
|
||||||
<UFactor>0,000</UFactor>
|
|
||||||
</LearningData>
|
|
||||||
|
|
||||||
</SuperMemoElement>
|
|
||||||
</SuperMemoElement>
|
|
||||||
</SuperMemoElement>
|
|
||||||
</SuperMemoCollection>
|
|
||||||
|
|
@ -1,219 +0,0 @@
|
||||||
--- Mnemosyne Data Base --- Format Version 1 ---
|
|
||||||
(lp1
|
|
||||||
(imnemosyne.core.mnemosyne_core
|
|
||||||
StartTime
|
|
||||||
p2
|
|
||||||
(dp3
|
|
||||||
S'time'
|
|
||||||
p4
|
|
||||||
F1183141800
|
|
||||||
sba(lp5
|
|
||||||
(imnemosyne.core.mnemosyne_core
|
|
||||||
Category
|
|
||||||
p6
|
|
||||||
(dp7
|
|
||||||
S'active'
|
|
||||||
p8
|
|
||||||
I01
|
|
||||||
sS'name'
|
|
||||||
p9
|
|
||||||
V<default>
|
|
||||||
p10
|
|
||||||
sba(imnemosyne.core.mnemosyne_core
|
|
||||||
Category
|
|
||||||
p11
|
|
||||||
(dp12
|
|
||||||
S'active'
|
|
||||||
p13
|
|
||||||
I01
|
|
||||||
sS'name'
|
|
||||||
p14
|
|
||||||
Vfoo
|
|
||||||
p15
|
|
||||||
sba(imnemosyne.core.mnemosyne_core
|
|
||||||
Category
|
|
||||||
p16
|
|
||||||
(dp17
|
|
||||||
g13
|
|
||||||
I01
|
|
||||||
sg14
|
|
||||||
Vbaz, quux
|
|
||||||
p18
|
|
||||||
sbaa(lp19
|
|
||||||
(imnemosyne.core.mnemosyne_core
|
|
||||||
Item
|
|
||||||
p20
|
|
||||||
(dp21
|
|
||||||
S'a'
|
|
||||||
Vbar
|
|
||||||
p22
|
|
||||||
sS'last_rep'
|
|
||||||
p23
|
|
||||||
L34L
|
|
||||||
sS'ret_reps'
|
|
||||||
p24
|
|
||||||
I0
|
|
||||||
sS'cat'
|
|
||||||
p25
|
|
||||||
g16
|
|
||||||
sS'q'
|
|
||||||
Vfoo
|
|
||||||
p26
|
|
||||||
sS'grade'
|
|
||||||
p27
|
|
||||||
I0
|
|
||||||
sS'acq_reps'
|
|
||||||
p28
|
|
||||||
I1
|
|
||||||
sS'ret_reps_since_lapse'
|
|
||||||
p29
|
|
||||||
I0
|
|
||||||
sS'easiness'
|
|
||||||
p30
|
|
||||||
F2.5
|
|
||||||
sS'lapses'
|
|
||||||
p31
|
|
||||||
I0
|
|
||||||
sS'acq_reps_since_lapse'
|
|
||||||
p32
|
|
||||||
I1
|
|
||||||
sS'next_rep'
|
|
||||||
p33
|
|
||||||
L34L
|
|
||||||
sS'id'
|
|
||||||
p34
|
|
||||||
S'9f401476'
|
|
||||||
p35
|
|
||||||
sba(imnemosyne.core.mnemosyne_core
|
|
||||||
Item
|
|
||||||
p36
|
|
||||||
(dp37
|
|
||||||
S'a'
|
|
||||||
Vfoo
|
|
||||||
p38
|
|
||||||
sg23
|
|
||||||
L34L
|
|
||||||
sg24
|
|
||||||
I0
|
|
||||||
sg25
|
|
||||||
g6
|
|
||||||
sS'q'
|
|
||||||
Vbar
|
|
||||||
p39
|
|
||||||
sg27
|
|
||||||
I0
|
|
||||||
sg28
|
|
||||||
I1
|
|
||||||
sg29
|
|
||||||
I0
|
|
||||||
sg30
|
|
||||||
F2.5
|
|
||||||
sg31
|
|
||||||
I0
|
|
||||||
sg32
|
|
||||||
I1
|
|
||||||
sg33
|
|
||||||
L34L
|
|
||||||
sg34
|
|
||||||
S'a869958d'
|
|
||||||
p40
|
|
||||||
sba(imnemosyne.core.mnemosyne_core
|
|
||||||
Item
|
|
||||||
p41
|
|
||||||
(dp42
|
|
||||||
S'a'
|
|
||||||
Vquux
|
|
||||||
p43
|
|
||||||
sg23
|
|
||||||
L34L
|
|
||||||
sg24
|
|
||||||
I0
|
|
||||||
sg25
|
|
||||||
g11
|
|
||||||
sS'q'
|
|
||||||
Vbaz
|
|
||||||
p44
|
|
||||||
sg27
|
|
||||||
I5
|
|
||||||
sg28
|
|
||||||
I2
|
|
||||||
sg29
|
|
||||||
I0
|
|
||||||
sg30
|
|
||||||
F2.5
|
|
||||||
sg31
|
|
||||||
I0
|
|
||||||
sg32
|
|
||||||
I2
|
|
||||||
sg33
|
|
||||||
L35L
|
|
||||||
sg34
|
|
||||||
S'74651aa3'
|
|
||||||
p45
|
|
||||||
sba(imnemosyne.core.mnemosyne_core
|
|
||||||
Item
|
|
||||||
p46
|
|
||||||
(dp47
|
|
||||||
S'a'
|
|
||||||
Vbaz
|
|
||||||
p48
|
|
||||||
sg23
|
|
||||||
L34L
|
|
||||||
sg24
|
|
||||||
I0
|
|
||||||
sg25
|
|
||||||
g6
|
|
||||||
sS'q'
|
|
||||||
Vquux
|
|
||||||
p49
|
|
||||||
sg27
|
|
||||||
I0
|
|
||||||
sg28
|
|
||||||
I1
|
|
||||||
sg29
|
|
||||||
I0
|
|
||||||
sg30
|
|
||||||
F2.5
|
|
||||||
sg31
|
|
||||||
I0
|
|
||||||
sg32
|
|
||||||
I1
|
|
||||||
sg33
|
|
||||||
L34L
|
|
||||||
sg34
|
|
||||||
S'0bd8f10b'
|
|
||||||
p50
|
|
||||||
sba(imnemosyne.core.mnemosyne_core
|
|
||||||
Item
|
|
||||||
p51
|
|
||||||
(dp52
|
|
||||||
S'a'
|
|
||||||
Vcard
|
|
||||||
p53
|
|
||||||
sg23
|
|
||||||
L34L
|
|
||||||
sg24
|
|
||||||
I0
|
|
||||||
sg25
|
|
||||||
g11
|
|
||||||
sS'q'
|
|
||||||
Vonesided
|
|
||||||
p54
|
|
||||||
sg27
|
|
||||||
I0
|
|
||||||
sg28
|
|
||||||
I1
|
|
||||||
sg29
|
|
||||||
I0
|
|
||||||
sg30
|
|
||||||
F2.5
|
|
||||||
sg31
|
|
||||||
I0
|
|
||||||
sg32
|
|
||||||
I1
|
|
||||||
sg33
|
|
||||||
L34L
|
|
||||||
sg34
|
|
||||||
S'bb503cf1'
|
|
||||||
p55
|
|
||||||
sbaa.
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,11 +0,0 @@
-# this is a test file
-
-食べる to eat
-飲む to drink
-テスト test
-to eat 食べる
-飲む to drink
-多すぎる too many fields
-not, enough, fields
-遊ぶ
-to play
@@ -1,2 +0,0 @@
-foo bar baz,qux
-foo2 bar2 baz2
@@ -1,11 +0,0 @@
-# this is a test file
-
-食べる to ate
-む to drink
-テスト testing
-to eat 食べる
-飲む to drink
-多すぎる too many fields
-not, enough, fields
-遊ぶ
-to play
@@ -9,7 +9,9 @@ def assertException(exception, func):
         found = True
     assert found

-def getDeck():
+def getEmptyDeck():
     (fd, nam) = tempfile.mkstemp(suffix=".anki")
     os.unlink(nam)
     return Deck(nam)
+
+testDir = os.path.dirname(__file__)
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,43 +1,34 @@
 # coding: utf-8

-import nose, os, re, tempfile, shutil
+import os, re
-from tests.shared import assertException, getDeck
+from tests.shared import assertException, getEmptyDeck, testDir

-from anki.errors import *
 from anki import Deck
-from anki.db import *
-from anki.models import FieldModel, Model, CardModel
-from anki.stdmodels import BasicModel
-from anki.utils import stripHTML

 newPath = None
-newModified = None
+newMod = None

-testDir = os.path.dirname(__file__)
+def test_create():
+    global newPath, newMod
-## opening/closing
-
-def test_attachNew():
-    global newPath, newModified
     path = "/tmp/test_attachNew.anki"
     try:
         os.unlink(path)
     except OSError:
         pass
     deck = Deck(path)
-    # for attachOld()
+    # for open()
     newPath = deck.path
     deck.save()
-    newModified = deck.modified
+    newMod = deck.mod
     deck.close()
     del deck

-def test_attachOld():
+def test_open():
-    deck = Deck(newPath, backup=False)
+    deck = Deck(newPath)
-    assert deck.modified == newModified
+    assert deck.mod == newMod
     deck.close()

-def test_attachReadOnly():
+def test_openReadOnly():
     # non-writeable dir
     assertException(Exception,
                     lambda: Deck("/attachroot"))
@ -48,260 +39,87 @@ def test_attachReadOnly():
|
||||||
os.chmod(newPath, 0666)
|
os.chmod(newPath, 0666)
|
||||||
os.unlink(newPath)
|
os.unlink(newPath)
|
||||||
|
|
||||||
def test_saveAs():
|
|
||||||
path = "/tmp/test_saveAs.anki"
|
|
||||||
try:
|
|
||||||
os.unlink(path)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
path2 = "/tmp/test_saveAs2.anki"
|
|
||||||
try:
|
|
||||||
os.unlink(path2)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
# start with an in-memory deck
|
|
||||||
deck = getDeck()
|
|
||||||
deck.addModel(BasicModel())
|
|
||||||
# add a card
|
|
||||||
f = deck.newFact()
|
|
||||||
f['Front'] = u"foo"; f['Back'] = u"bar"
|
|
||||||
deck.addFact(f)
|
|
||||||
assert deck.cardCount() == 1
|
|
||||||
# save in new deck
|
|
||||||
newDeck = deck.saveAs(path)
|
|
||||||
assert newDeck.cardCount() == 1
|
|
||||||
# delete card
|
|
||||||
id = newDeck.db.scalar("select id from cards")
|
|
||||||
newDeck.deleteCard(id)
|
|
||||||
# save into new deck
|
|
||||||
newDeck2 = newDeck.saveAs(path2)
|
|
||||||
# new deck should have zero cards
|
|
||||||
assert newDeck2.cardCount() == 0
|
|
||||||
# but old deck should have reverted the unsaved changes
|
|
||||||
newDeck = Deck(path)
|
|
||||||
assert newDeck.cardCount() == 1
|
|
||||||
newDeck.close()
|
|
||||||
|
|
||||||
def test_factAddDelete():
|
def test_factAddDelete():
|
||||||
deck = getDeck()
|
deck = getEmptyDeck()
|
||||||
deck.addModel(BasicModel())
|
|
||||||
# set rollback point
|
|
||||||
deck.db.commit()
|
|
||||||
f = deck.newFact()
|
|
||||||
# empty fields
|
|
||||||
try:
|
|
||||||
deck.addFact(f)
|
|
||||||
except Exception, e:
|
|
||||||
pass
|
|
||||||
assert e.data['type'] == 'fieldEmpty'
|
|
||||||
# add a fact
|
# add a fact
|
||||||
|
f = deck.newFact()
|
||||||
f['Front'] = u"one"; f['Back'] = u"two"
|
f['Front'] = u"one"; f['Back'] = u"two"
|
||||||
f = deck.addFact(f)
|
n = deck.addFact(f)
|
||||||
assert len(f.cards) == 1
|
assert n == 1
|
||||||
deck.rollback()
|
deck.rollback()
|
||||||
|
assert deck.cardCount() == 0
|
||||||
# try with two cards
|
# try with two cards
|
||||||
f = deck.newFact()
|
f = deck.newFact()
|
||||||
f['Front'] = u"one"; f['Back'] = u"two"
|
f['Front'] = u"one"; f['Back'] = u"two"
|
||||||
f.model.cardModels[1].active = True
|
m = f.model
|
||||||
f = deck.addFact(f)
|
m.templates[1].active = True
|
||||||
assert len(f.cards) == 2
|
m.flush()
|
||||||
# ensure correct order
|
n = deck.addFact(f)
|
||||||
c0 = [c for c in f.cards if c.ordinal == 0][0]
|
assert n == 2
|
||||||
assert re.sub("</?.+?>", "", c0.question) == u"one"
|
# check q/a generation
|
||||||
# now let's make a duplicate
|
c0 = f.cards()[0]
|
||||||
|
assert re.sub("</?.+?>", "", c0.q) == u"one"
|
||||||
|
# it should not be a duplicate
|
||||||
|
for p in f.problems():
|
||||||
|
assert not p
|
||||||
|
# now let's make a duplicate and test uniqueness
|
||||||
f2 = deck.newFact()
|
f2 = deck.newFact()
|
||||||
f2['Front'] = u"one"; f2['Back'] = u"three"
|
f2.model.fields[1].conf['required'] = True
|
||||||
try:
|
f2['Front'] = u"one"; f2['Back'] = u""
|
||||||
f2 = deck.addFact(f2)
|
p = f2.problems()
|
||||||
except Exception, e:
|
assert p[0] == "unique"
|
||||||
pass
|
assert p[1] == "required"
|
||||||
assert e.data['type'] == 'fieldNotUnique'
|
|
||||||
# try delete the first card
|
# try delete the first card
|
||||||
id1 = f.cards[0].id; id2 = f.cards[1].id
|
cards = f.cards()
|
||||||
|
id1 = cards[0].id; id2 = cards[1].id
|
||||||
|
assert deck.cardCount() == 2
|
||||||
|
assert deck.factCount() == 1
|
||||||
deck.deleteCard(id1)
|
deck.deleteCard(id1)
|
||||||
|
assert deck.cardCount() == 1
|
||||||
|
assert deck.factCount() == 1
|
||||||
# and the second should clear the fact
|
# and the second should clear the fact
|
||||||
deck.deleteCard(id2)
|
deck.deleteCard(id2)
|
||||||
|
assert deck.cardCount() == 0
|
||||||
|
assert deck.factCount() == 0
|
||||||
|
|
||||||
def test_fieldChecksum():
|
def test_fieldChecksum():
|
||||||
deck = getDeck()
|
deck = getEmptyDeck()
|
||||||
deck.addModel(BasicModel())
|
|
||||||
f = deck.newFact()
|
f = deck.newFact()
|
||||||
f['Front'] = u"new"; f['Back'] = u"new2"
|
f['Front'] = u"new"; f['Back'] = u"new2"
|
||||||
deck.addFact(f)
|
deck.addFact(f)
|
||||||
(id, sum) = deck.db.first(
|
assert deck.db.scalar(
|
||||||
"select id, chksum from fields where value = 'new'")
|
"select csum from fdata where ord = 0") == "22af645d"
|
||||||
assert sum == "22af645d"
|
|
||||||
# empty field should have no checksum
|
# empty field should have no checksum
|
||||||
f['Front'] = u""
|
f['Front'] = u""
|
||||||
deck.db.flush()
|
f.flush()
|
||||||
assert deck.db.scalar(
|
assert deck.db.scalar(
|
||||||
"select chksum from fields where id = :id", id=id) == ""
|
"select csum from fdata where ord = 0") == ""
|
||||||
# changing the value should change the checksum
|
# changing the val should change the checksum
|
||||||
f['Front'] = u"newx"
|
f['Front'] = u"newx"
|
||||||
deck.db.flush()
|
f.flush()
|
||||||
assert deck.db.scalar(
|
assert deck.db.scalar(
|
||||||
"select chksum from fields where id = :id", id=id) == "4b0e5a4c"
|
"select csum from fdata where ord = 0") == "4b0e5a4c"
|
||||||
# back should have no checksum, because it's not set to be unique
|
# back should have no checksum, because it's not set to be unique
|
||||||
(id, sum) = deck.db.first(
|
assert deck.db.scalar(
|
||||||
"select id, chksum from fields where value = 'new2'")
|
"select csum from fdata where ord = 1") == ""
|
||||||
assert sum == ""
|
|
||||||
# if we turn on unique, it should get a checksum
|
# if we turn on unique, it should get a checksum
|
||||||
fm = f.model.fieldModels[1]
|
f.model.fields[1].conf['unique'] = True
|
||||||
fm.unique = True
|
f.model.flush()
|
||||||
deck.updateFieldChecksums(fm.id)
|
f.model.updateCache()
|
||||||
|
print deck.db.scalar(
|
||||||
|
"select csum from fdata where ord = 1")
|
||||||
assert deck.db.scalar(
|
assert deck.db.scalar(
|
||||||
"select chksum from fields where id = :id", id=id) == "82f2ec5f"
|
"select csum from fdata where ord = 1") == "82f2ec5f"
|
||||||
# and turning it off should zero the checksum again
|
# turning it off doesn't currently zero the checksum for efficiency reasons
|
||||||
fm.unique = False
|
# f.model.fields[1].conf['unique'] = False
|
||||||
deck.updateFieldChecksums(fm.id)
|
# f.model.flush()
|
||||||
assert deck.db.scalar(
|
# f.model.updateCache()
|
||||||
"select chksum from fields where id = :id", id=id) == ""
|
# assert deck.db.scalar(
|
||||||
|
# "select csum from fdata where ord = 1") == ""
|
||||||
def test_modelAddDelete():
|
|
||||||
deck = getDeck()
|
|
||||||
deck.addModel(BasicModel())
|
|
||||||
deck.addModel(BasicModel())
|
|
||||||
f = deck.newFact()
|
|
||||||
f['Front'] = u'1'
|
|
||||||
f['Back'] = u'2'
|
|
||||||
deck.addFact(f)
|
|
||||||
assert deck.cardCount() == 1
|
|
||||||
deck.deleteModel(deck.currentModel)
|
|
||||||
deck.reset()
|
|
||||||
assert deck.cardCount() == 0
|
|
||||||
deck.db.refresh(deck)
|
|
||||||
|
|
||||||
def test_modelCopy():
|
|
||||||
deck = getDeck()
|
|
||||||
m = BasicModel()
|
|
||||||
assert len(m.fieldModels) == 2
|
|
||||||
assert len(m.cardModels) == 2
|
|
||||||
deck.addModel(m)
|
|
||||||
f = deck.newFact()
|
|
||||||
f['Front'] = u'1'
|
|
||||||
deck.addFact(f)
|
|
||||||
m2 = deck.copyModel(m)
|
|
||||||
assert m2.name == "Basic copy"
|
|
||||||
assert m2.id != m.id
|
|
||||||
assert m2.fieldModels[0].id != m.fieldModels[0].id
|
|
||||||
assert m2.cardModels[0].id != m.cardModels[0].id
|
|
||||||
assert len(m2.fieldModels) == 2
|
|
||||||
assert len(m.fieldModels) == 2
|
|
||||||
assert len(m2.fieldModels) == len(m.fieldModels)
|
|
||||||
assert len(m.cardModels) == 2
|
|
||||||
assert len(m2.cardModels) == 2
|
|
||||||
|
|
||||||
def test_media():
|
|
||||||
deck = getDeck()
|
|
||||||
# create a media dir
|
|
||||||
deck.mediaDir(create=True)
|
|
||||||
# put a file into it
|
|
||||||
file = unicode(os.path.join(testDir, "deck/fake.png"))
|
|
||||||
deck.addMedia(file)
|
|
||||||
# make sure it gets copied on saveas
|
|
||||||
path = "/tmp/saveAs2.anki"
|
|
||||||
sum = "fake.png"
|
|
||||||
try:
|
|
||||||
os.unlink(path)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
deck.saveAs(path)
|
|
||||||
assert os.path.exists("/tmp/saveAs2.media/%s" % sum)
|
|
||||||
|
|
||||||
def test_modelChange():
|
|
||||||
deck = getDeck()
|
|
||||||
m = Model(u"Japanese")
|
|
||||||
m1 = m
|
|
||||||
f = FieldModel(u'Expression', True, True)
|
|
||||||
m.addFieldModel(f)
|
|
||||||
m.addFieldModel(FieldModel(u'Meaning', False, False))
|
|
||||||
f = FieldModel(u'Reading', False, False)
|
|
||||||
m.addFieldModel(f)
|
|
||||||
m.addCardModel(CardModel(u"Recognition",
|
|
||||||
u"%(Expression)s",
|
|
||||||
u"%(Reading)s<br>%(Meaning)s"))
|
|
||||||
m.addCardModel(CardModel(u"Recall",
|
|
||||||
u"%(Meaning)s",
|
|
||||||
u"%(Expression)s<br>%(Reading)s",
|
|
||||||
active=False))
|
|
||||||
m.tags = u"Japanese"
|
|
||||||
m1.cardModels[1].active = True
|
|
||||||
deck.addModel(m1)
|
|
||||||
f = deck.newFact()
|
|
||||||
f['Expression'] = u'e'
|
|
||||||
f['Meaning'] = u'm'
|
|
||||||
f['Reading'] = u'r'
|
|
||||||
f = deck.addFact(f)
|
|
||||||
f2 = deck.newFact()
|
|
||||||
f2['Expression'] = u'e2'
|
|
||||||
f2['Meaning'] = u'm2'
|
|
||||||
f2['Reading'] = u'r2'
|
|
||||||
deck.addFact(f2)
|
|
||||||
m2 = BasicModel()
|
|
||||||
m2.cardModels[1].active = True
|
|
||||||
deck.addModel(m2)
|
|
||||||
# convert to basic
|
|
||||||
assert deck.modelUseCount(m1) == 2
|
|
||||||
assert deck.modelUseCount(m2) == 0
|
|
||||||
assert deck.cardCount() == 4
|
|
||||||
assert deck.factCount() == 2
|
|
||||||
fmap = {m1.fieldModels[0]: m2.fieldModels[0],
|
|
||||||
m1.fieldModels[1]: None,
|
|
||||||
m1.fieldModels[2]: m2.fieldModels[1]}
|
|
||||||
cmap = {m1.cardModels[0]: m2.cardModels[0],
|
|
||||||
m1.cardModels[1]: None}
|
|
||||||
deck.changeModel([f.id], m2, fmap, cmap)
|
|
||||||
deck.reset()
|
|
||||||
assert deck.modelUseCount(m1) == 1
|
|
||||||
assert deck.modelUseCount(m2) == 1
|
|
||||||
assert deck.cardCount() == 3
|
|
||||||
assert deck.factCount() == 2
|
|
||||||
(q, a) = deck.db.first("""
|
|
||||||
select question, answer from cards where factId = :id""",
|
|
||||||
id=f.id)
|
|
||||||
assert stripHTML(q) == u"e"
|
|
||||||
assert stripHTML(a) == u"r"
|
|
||||||
|
|
||||||
def test_findCards():
|
|
||||||
deck = getDeck()
|
|
||||||
deck.addModel(BasicModel())
|
|
||||||
f = deck.newFact()
|
|
||||||
f['Front'] = u'dog'
|
|
||||||
f['Back'] = u'cat'
|
|
||||||
f.addTags(u"monkey")
|
|
||||||
deck.addFact(f)
|
|
||||||
f = deck.newFact()
|
|
||||||
f['Front'] = u'goats are fun'
|
|
||||||
f['Back'] = u'sheep'
|
|
||||||
f.addTags(u"sheep goat horse")
|
|
||||||
deck.addFact(f)
|
|
||||||
f = deck.newFact()
|
|
||||||
f['Front'] = u'cat'
|
|
||||||
f['Back'] = u'sheep'
|
|
||||||
deck.addFact(f)
|
|
||||||
assert not deck.findCards("tag:donkey")
|
|
||||||
assert len(deck.findCards("tag:sheep")) == 1
|
|
||||||
assert len(deck.findCards("tag:sheep tag:goat")) == 1
|
|
||||||
assert len(deck.findCards("tag:sheep tag:monkey")) == 0
|
|
||||||
assert len(deck.findCards("tag:monkey")) == 1
|
|
||||||
assert len(deck.findCards("tag:sheep -tag:monkey")) == 1
|
|
||||||
assert len(deck.findCards("-tag:sheep")) == 2
|
|
||||||
assert len(deck.findCards("cat")) == 2
|
|
||||||
assert len(deck.findCards("cat -dog")) == 1
|
|
||||||
assert len(deck.findCards("cat -dog")) == 1
|
|
||||||
assert len(deck.findCards("are goats")) == 1
|
|
||||||
assert len(deck.findCards('"are goats"')) == 0
|
|
||||||
assert len(deck.findCards('"goats are"')) == 1
|
|
||||||
deck.addTags(deck.db.column0("select id from cards"), "foo bar")
|
|
||||||
assert (len(deck.findCards("tag:foo")) ==
|
|
||||||
len(deck.findCards("tag:bar")) ==
|
|
||||||
3)
|
|
||||||
deck.deleteTags(deck.db.column0("select id from cards"), "foo")
|
|
||||||
assert len(deck.findCards("tag:foo")) == 0
|
|
||||||
assert len(deck.findCards("tag:bar")) == 3
|
|
||||||
|
|
||||||
def test_upgrade():
|
def test_upgrade():
|
||||||
src = os.path.expanduser("~/Scratch/upgrade.anki")
|
import tempfile, shutil
|
||||||
|
src = os.path.join(testDir, "support", "anki12.anki")
|
||||||
(fd, dst) = tempfile.mkstemp(suffix=".anki")
|
(fd, dst) = tempfile.mkstemp(suffix=".anki")
|
||||||
print "upgrade to", dst
|
print "upgrade to", dst
|
||||||
shutil.copy(src, dst)
|
shutil.copy(src, dst)
|
||||||
|
|
|
||||||
|
|
@ -1,55 +1,45 @@
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
|
|
||||||
import tempfile, os, time
|
import tempfile, os, time
|
||||||
import anki.media as m
|
|
||||||
from anki import Deck
|
from anki import Deck
|
||||||
from anki.stdmodels import BasicModel
|
|
||||||
from anki.utils import checksum
|
from anki.utils import checksum
|
||||||
|
from shared import getEmptyDeck, testDir
|
||||||
def getDeck():
|
|
||||||
import tempfile
|
|
||||||
(fd, nam) = tempfile.mkstemp(suffix=".anki")
|
|
||||||
os.unlink(nam)
|
|
||||||
return Deck(nam)
|
|
||||||
|
|
||||||
# uniqueness check
|
# uniqueness check
|
||||||
def test_unique():
|
def test_unique():
|
||||||
|
d = getEmptyDeck()
|
||||||
dir = tempfile.mkdtemp(prefix="anki")
|
dir = tempfile.mkdtemp(prefix="anki")
|
||||||
# new file
|
# new file
|
||||||
n = "foo.jpg"
|
n = "foo.jpg"
|
||||||
new = os.path.basename(m.uniquePath(dir, n))
|
new = os.path.basename(d.media.uniquePath(dir, n))
|
||||||
assert new == n
|
assert new == n
|
||||||
# duplicate file
|
# duplicate file
|
||||||
open(os.path.join(dir, n), "w").write("hello")
|
open(os.path.join(dir, n), "w").write("hello")
|
||||||
n = "foo.jpg"
|
n = "foo.jpg"
|
||||||
new = os.path.basename(m.uniquePath(dir, n))
|
new = os.path.basename(d.media.uniquePath(dir, n))
|
||||||
assert new == "foo (1).jpg"
|
assert new == "foo (1).jpg"
|
||||||
# another duplicate
|
# another duplicate
|
||||||
open(os.path.join(dir, "foo (1).jpg"), "w").write("hello")
|
open(os.path.join(dir, "foo (1).jpg"), "w").write("hello")
|
||||||
n = "foo.jpg"
|
n = "foo.jpg"
|
||||||
new = os.path.basename(m.uniquePath(dir, n))
|
new = os.path.basename(d.media.uniquePath(dir, n))
|
||||||
assert new == "foo (2).jpg"
|
assert new == "foo (2).jpg"
|
||||||
|
|
||||||
# copying files to media folder
|
# copying files to media folder
|
||||||
def test_copy():
|
def test_copy():
|
||||||
deck = getDeck()
|
d = getEmptyDeck()
|
||||||
dir = tempfile.mkdtemp(prefix="anki")
|
dir = tempfile.mkdtemp(prefix="anki")
|
||||||
path = os.path.join(dir, "foo.jpg")
|
path = os.path.join(dir, "foo.jpg")
|
||||||
open(path, "w").write("hello")
|
open(path, "w").write("hello")
|
||||||
# new file
|
# new file
|
||||||
assert m.copyToMedia(deck, path) == "foo.jpg"
|
assert d.media.addFile(path) == "foo.jpg"
|
||||||
# dupe md5
|
# dupe md5
|
||||||
deck.db.statement("""
|
|
||||||
insert into media values (null, 'foo.jpg', 0, 0, :sum)""",
|
|
||||||
sum=checksum("hello"))
|
|
||||||
path = os.path.join(dir, "bar.jpg")
|
path = os.path.join(dir, "bar.jpg")
|
||||||
open(path, "w").write("hello")
|
open(path, "w").write("hello")
|
||||||
assert m.copyToMedia(deck, path) == "foo.jpg"
|
assert d.media.addFile(path) == "foo.jpg"
|
||||||
|
|
||||||
# media db
|
# media db
|
||||||
def test_db():
|
def test_db():
|
||||||
deck = getDeck()
|
deck = getEmptyDeck()
|
||||||
deck.addModel(BasicModel())
|
|
||||||
dir = tempfile.mkdtemp(prefix="anki")
|
dir = tempfile.mkdtemp(prefix="anki")
|
||||||
path = os.path.join(dir, "foo.jpg")
|
path = os.path.join(dir, "foo.jpg")
|
||||||
open(path, "w").write("hello")
|
open(path, "w").write("hello")
|
||||||
|
|
@ -58,55 +48,42 @@ def test_db():
|
||||||
f['Front'] = u"<img src='foo.jpg'>"
|
f['Front'] = u"<img src='foo.jpg'>"
|
||||||
f['Back'] = u"back [sound:foo.jpg]"
|
f['Back'] = u"back [sound:foo.jpg]"
|
||||||
deck.addFact(f)
|
deck.addFact(f)
|
||||||
# 1 entry in the media db, with two references, and missing file
|
# 1 entry in the media db, and no checksum
|
||||||
assert deck.db.scalar("select count() from media") == 1
|
assert deck.db.scalar("select count() from media") == 1
|
||||||
assert deck.db.scalar("select refcnt from media") == 2
|
assert not deck.db.scalar("select group_concat(csum, '') from media")
|
||||||
assert not deck.db.scalar("select group_concat(chksum, '') from media")
|
# copy to media folder
|
||||||
# copy to media folder & check db
|
path = deck.media.addFile(path)
|
||||||
path = m.copyToMedia(deck, path)
|
|
||||||
m.rebuildMediaDir(deck)
|
|
||||||
# md5 should be set now
|
# md5 should be set now
|
||||||
assert deck.db.scalar("select count() from media") == 1
|
assert deck.db.scalar("select count() from media") == 1
|
||||||
assert deck.db.scalar("select refcnt from media") == 2
|
assert deck.db.scalar("select group_concat(csum, '') from media")
|
||||||
assert deck.db.scalar("select group_concat(chksum, '') from media")
|
|
||||||
# edit the fact to remove a reference
|
|
||||||
f['Back'] = u""
|
|
||||||
f.setModified(True, deck)
|
|
||||||
deck.db.flush()
|
|
||||||
assert deck.db.scalar("select count() from media") == 1
|
|
||||||
assert deck.db.scalar("select refcnt from media") == 1
|
|
||||||
# remove the front reference too
|
|
||||||
f['Front'] = u""
|
|
||||||
f.setModified(True, deck)
|
|
||||||
assert deck.db.scalar("select refcnt from media") == 0
|
|
||||||
# add the reference back
|
|
||||||
f['Front'] = u"<img src='foo.jpg'>"
|
|
||||||
f.setModified(True, deck)
|
|
||||||
assert deck.db.scalar("select refcnt from media") == 1
|
|
||||||
# detect file modifications
|
# detect file modifications
|
||||||
oldsum = deck.db.scalar("select chksum from media")
|
oldsum = deck.db.scalar("select csum from media")
|
||||||
open(path, "w").write("world")
|
open(path, "w").write("world")
|
||||||
m.rebuildMediaDir(deck)
|
deck.media.rebuildMediaDir()
|
||||||
newsum = deck.db.scalar("select chksum from media")
|
newsum = deck.db.scalar("select csum from media")
|
||||||
assert newsum and newsum != oldsum
|
assert newsum and newsum != oldsum
|
||||||
# delete underlying file and check db
|
# delete underlying file and check db
|
||||||
os.unlink(path)
|
os.unlink(path)
|
||||||
m.rebuildMediaDir(deck)
|
deck.media.rebuildMediaDir()
|
||||||
# md5 should be gone again
|
# md5 should be gone again
|
||||||
assert deck.db.scalar("select count() from media") == 1
|
assert deck.db.scalar("select count() from media") == 1
|
||||||
assert deck.db.scalar("select not chksum from media")
|
assert deck.db.scalar("select not csum from media")
|
||||||
# media db should pick up media defined via templates & bulk update
|
# media db should pick up media defined via templates & bulk update
|
||||||
f['Back'] = u"bar.jpg"
|
f['Back'] = u"bar.jpg"
|
||||||
f.setModified(True, deck)
|
f.flush()
|
||||||
deck.db.flush()
|
|
||||||
# modify template & regenerate
|
# modify template & regenerate
|
||||||
assert deck.db.scalar("select count() from media") == 1
|
assert deck.db.scalar("select count() from media") == 1
|
||||||
assert deck.db.scalar("select sum(refcnt) from media") == 1
|
m = deck.currentModel()
|
||||||
deck.currentModel.cardModels[0].aformat=u'<img src="{{{Back}}}">'
|
m.templates[0].afmt=u'<img src="{{{Back}}}">'
|
||||||
deck.updateCardsFromModel(deck.currentModel)
|
m.flush()
|
||||||
assert deck.db.scalar("select sum(refcnt) from media") == 2
|
m.updateCache()
|
||||||
assert deck.db.scalar("select count() from media") == 2
|
assert deck.db.scalar("select count() from media") == 2
|
||||||
deck.currentModel.cardModels[0].aformat=u'{{{Back}}}'
|
|
||||||
deck.updateCardsFromModel(deck.currentModel)
|
def test_deckIntegration():
|
||||||
assert deck.db.scalar("select count() from media") == 2
|
deck = getEmptyDeck()
|
||||||
assert deck.db.scalar("select sum(refcnt) from media") == 1
|
# create a media dir
|
||||||
|
deck.media.mediaDir(create=True)
|
||||||
|
# put a file into it
|
||||||
|
file = unicode(os.path.join(testDir, "deck/fake.png"))
|
||||||
|
deck.media.addFile(file)
|
||||||
|
print "todo: check media copied on rename"
|
||||||
|
|
|
||||||
|
|
@ -1,19 +1,13 @@
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
|
|
||||||
import time
|
import time
|
||||||
from tests.shared import assertException, getDeck
|
from tests.shared import assertException, getEmptyDeck
|
||||||
from anki.stdmodels import BasicModel
|
from anki.stdmodels import BasicModel
|
||||||
#from anki.db import *
|
#from anki.db import *
|
||||||
|
|
||||||
def getEmptyDeck():
|
|
||||||
d = getDeck()
|
|
||||||
d.addModel(BasicModel())
|
|
||||||
d.db.commit()
|
|
||||||
return d
|
|
||||||
|
|
||||||
def test_basics():
|
def test_basics():
|
||||||
d = getEmptyDeck()
|
d = getEmptyDeck()
|
||||||
assert not d.getCard()
|
assert not d.sched.getCard()
|
||||||
|
|
||||||
def test_new():
|
def test_new():
|
||||||
d = getEmptyDeck()
|
d = getEmptyDeck()
|
||||||
|
|
@ -22,16 +16,15 @@ def test_new():
|
||||||
f = d.newFact()
|
f = d.newFact()
|
||||||
f['Front'] = u"one"; f['Back'] = u"two"
|
f['Front'] = u"one"; f['Back'] = u"two"
|
||||||
f = d.addFact(f)
|
f = d.addFact(f)
|
||||||
d.db.flush()
|
|
||||||
d.reset()
|
d.reset()
|
||||||
assert d.sched.newCount == 1
|
assert d.sched.newCount == 1
|
||||||
# fetch it
|
# fetch it
|
||||||
c = d.getCard()
|
c = d.sched.getCard()
|
||||||
assert c
|
assert c
|
||||||
assert c.queue == 2
|
assert c.queue == 2
|
||||||
assert c.type == 2
|
assert c.type == 2
|
||||||
# if we answer it, it should become a learn card
|
# if we answer it, it should become a learn card
|
||||||
d.answerCard(c, 1)
|
d.sched.answerCard(c, 1)
|
||||||
assert c.queue == 0
|
assert c.queue == 0
|
||||||
assert c.type == 2
|
assert c.type == 2
|
||||||
|
|
||||||
|
|
@ -41,31 +34,30 @@ def test_learn():
|
||||||
f = d.newFact()
|
f = d.newFact()
|
||||||
f['Front'] = u"one"; f['Back'] = u"two"
|
f['Front'] = u"one"; f['Back'] = u"two"
|
||||||
f = d.addFact(f)
|
f = d.addFact(f)
|
||||||
d.db.flush()
|
|
||||||
# set as a learn card and rebuild queues
|
# set as a learn card and rebuild queues
|
||||||
d.db.statement("update cards set queue=0, type=2")
|
d.db.execute("update cards set queue=0, type=2")
|
||||||
d.reset()
|
d.reset()
|
||||||
# getCard should return it, since it's due in the past
|
# sched.getCard should return it, since it's due in the past
|
||||||
c = d.getCard()
|
c = d.sched.getCard()
|
||||||
assert c
|
assert c
|
||||||
# it should have no cycles and a grade of 0
|
# it should have no cycles and a grade of 0
|
||||||
assert c.grade == c.cycles == 0
|
assert c.grade == c.cycles == 0
|
||||||
# fail it
|
# fail it
|
||||||
d.answerCard(c, 1)
|
d.sched.answerCard(c, 1)
|
||||||
# it should by due in 30 seconds
|
# it should by due in 30 seconds
|
||||||
assert round(c.due - time.time()) == 30
|
assert round(c.due - time.time()) == 30
|
||||||
# and have 1 cycle, but still a zero grade
|
# and have 1 cycle, but still a zero grade
|
||||||
assert c.grade == 0
|
assert c.grade == 0
|
||||||
assert c.cycles == 1
|
assert c.cycles == 1
|
||||||
# pass it once
|
# pass it once
|
||||||
d.answerCard(c, 2)
|
d.sched.answerCard(c, 2)
|
||||||
# it should by due in 3 minutes
|
# it should by due in 3 minutes
|
||||||
assert round(c.due - time.time()) == 180
|
assert round(c.due - time.time()) == 180
|
||||||
# and it should be grade 1 now
|
# and it should be grade 1 now
|
||||||
assert c.grade == 1
|
assert c.grade == 1
|
||||||
assert c.cycles == 2
|
assert c.cycles == 2
|
||||||
# pass again
|
# pass again
|
||||||
d.answerCard(c, 2)
|
d.sched.answerCard(c, 2)
|
||||||
# it should by due in 10 minutes
|
# it should by due in 10 minutes
|
||||||
assert round(c.due - time.time()) == 600
|
assert round(c.due - time.time()) == 600
|
||||||
# and it should be grade 1 now
|
# and it should be grade 1 now
|
||||||
|
|
@ -74,7 +66,7 @@ def test_learn():
|
||||||
# the next pass should graduate the card
|
# the next pass should graduate the card
|
||||||
assert c.queue == 0
|
assert c.queue == 0
|
||||||
assert c.type == 2
|
assert c.type == 2
|
||||||
d.answerCard(c, 2)
|
d.sched.answerCard(c, 2)
|
||||||
assert c.queue == 1
|
assert c.queue == 1
|
||||||
assert c.type == 1
|
assert c.type == 1
|
||||||
print "test intervals, check early removal, etc"
|
print "test intervals, check early removal, etc"
|
||||||
|
|
|