Be explicit in our not-None assumptions to appease the type checker

This commit is contained in:
Damien Elmes 2019-12-16 17:47:07 +10:00
parent 6757b30e72
commit 1da52f89fe
4 changed files with 10 additions and 3 deletions

View file

@ -393,6 +393,7 @@ crt=?, mod=?, scm=?, dty=?, usn=?, ls=?, conf=?""",
for nid, mid, flds in self.db.execute( for nid, mid, flds in self.db.execute(
"select id, mid, flds from notes where id in "+snids): "select id, mid, flds from notes where id in "+snids):
model = self.models.get(mid) model = self.models.get(mid)
assert(model)
avail = self.models.availOrds(model, flds) avail = self.models.availOrds(model, flds)
did = dids.get(nid) or model['did'] did = dids.get(nid) or model['did']
due = dues.get(nid) due = dues.get(nid)
@ -456,6 +457,7 @@ insert into cards values (?,?,?,?,?,?,0,0,?,0,0,0,0,0,0,0,0,"")""",
card.did = note.model()['did'] card.did = note.model()['did']
# if invalid did, use default instead # if invalid did, use default instead
deck = self.decks.get(card.did) deck = self.decks.get(card.did)
assert(deck)
if deck['dyn']: if deck['dyn']:
# must not be a filtered deck # must not be a filtered deck
card.did = 1 card.did = 1
@ -567,6 +569,7 @@ where c.nid = n.id and c.id in %s group by nid""" % ids2str(cids)):
flist = splitFields(data[6]) flist = splitFields(data[6])
fields = {} fields = {}
model = self.models.get(data[2]) model = self.models.get(data[2])
assert(model)
for (name, (idx, conf)) in list(self.models.fieldMap(model).items()): for (name, (idx, conf)) in list(self.models.fieldMap(model).items()):
fields[name] = flist[idx] fields[name] = flist[idx]
fields['Tags'] = data[5].strip() fields['Tags'] = data[5].strip()

View file

@ -33,6 +33,7 @@ f._id=d._fact_id"""):
notes[note['_id']] = note notes[note['_id']] = note
note = {'_id': _id} note = {'_id': _id}
curid = id curid = id
assert(note)
note[k] = v note[k] = v
if note: if note:
notes[note['_id']] = note notes[note['_id']] = note
@ -82,6 +83,7 @@ acq_reps+ret_reps, lapses, card_type_id from cards"""):
c.due = self.col.sched.today+rem c.due = self.col.sched.today+rem
# get ord # get ord
m = re.search(r".(\d+)$", row[1]) m = re.search(r".(\d+)$", row[1])
assert(m)
ord = int(m.group(1))-1 ord = int(m.group(1))-1
if 'cards' not in note: if 'cards' not in note:
note['cards'] = {} note['cards'] = {}

View file

@ -50,6 +50,7 @@ class PaukerImporter(NoteImporter):
front = card.findtext('./FrontSide/Text') front = card.findtext('./FrontSide/Text')
back = card.findtext('./ReverseSide/Text') back = card.findtext('./ReverseSide/Text')
note = ForeignNote() note = ForeignNote()
assert(front and back)
note.fields = [html.escape(x.strip()).replace('\n','<br>').replace(' ',' &nbsp;') for x in [front,back]] note.fields = [html.escape(x.strip()).replace('\n','<br>').replace(' ',' &nbsp;') for x in [front,back]]
notes.append(note) notes.append(note)

View file

@ -363,8 +363,7 @@ group by day order by day""" % (self._limit(), lim),
return self._section(txt1) + self._section(txt2) return self._section(txt1) + self._section(txt2)
def _ansInfo(self, totd, studied, first, unit, convHours=False, total=None): def _ansInfo(self, totd, studied, first, unit, convHours=False, total=None):
if not totd: assert(totd)
return
tot = totd[-1][1] tot = totd[-1][1]
period = self._periodDays() period = self._periodDays()
if not period: if not period:
@ -504,12 +503,14 @@ group by day order by day""" % lim,
lim = "where " + " and ".join(lims) lim = "where " + " and ".join(lims)
else: else:
lim = "" lim = ""
return self.col.db.first(""" ret = self.col.db.first("""
select count(), abs(min(day)) from (select select count(), abs(min(day)) from (select
(cast((id/1000 - :cut) / 86400.0 as int)+1) as day (cast((id/1000 - :cut) / 86400.0 as int)+1) as day
from revlog %s from revlog %s
group by day order by day)""" % lim, group by day order by day)""" % lim,
cut=self.col.sched.dayCutoff) cut=self.col.sched.dayCutoff)
assert(ret)
return ret
# Intervals # Intervals
###################################################################### ######################################################################