embed deck messages

parent 1a4c4373d2
commit c4b3ab62c8
7 changed files with 80 additions and 81 deletions

@@ -43,7 +43,7 @@ from anki.decks import Deck, DeckId, DeckManager
 from anki.errors import AbortSchemaModification, DBError
 from anki.lang import FormatTimeSpan
 from anki.media import MediaManager, media_paths_from_col_path
-from anki.models import ModelManager, NotetypeDict, NotetypeId, Notetype
+from anki.models import ModelManager, Notetype, NotetypeDict, NotetypeId
 from anki.notes import Note, NoteId
 from anki.scheduler.v1 import Scheduler as V1Scheduler
 from anki.scheduler.v2 import Scheduler as V2Scheduler

@@ -20,7 +20,7 @@ from anki.utils import from_json_bytes, ids2str, intTime, legacy_func, to_json_b
 # public exports
 DeckTreeNode = _pb.DeckTreeNode
 DeckNameId = _pb.DeckNameId
-FilteredDeckConfig = _pb.FilteredDeck
+FilteredDeckConfig = _pb.Deck.Filtered
 
 # legacy code may pass this in as the type argument to .id()
 defaultDeck = 0

@@ -356,68 +356,6 @@ message DeckConfigInner {
   bytes other = 255;
 }
-
-message DeckCommon {
-  bool study_collapsed = 1;
-  bool browser_collapsed = 2;
-
-  uint32 last_day_studied = 3;
-  int32 new_studied = 4;
-  int32 review_studied = 5;
-  int32 milliseconds_studied = 7;
-
-  // previously set in the v1 scheduler,
-  // but not currently used for anything
-  int32 learning_studied = 6;
-
-  reserved 8 to 13;
-
-  bytes other = 255;
-}
-
-message DeckKind {
-  oneof kind {
-    NormalDeck normal = 1;
-    FilteredDeck filtered = 2;
-  }
-}
-
-message NormalDeck {
-  int64 config_id = 1;
-  uint32 extend_new = 2;
-  uint32 extend_review = 3;
-  string description = 4;
-  bool markdown_description = 5;
-
-  reserved 6 to 11;
-}
-
-message FilteredDeck {
-  message SearchTerm {
-    enum Order {
-      OLDEST_REVIEWED_FIRST = 0;
-      RANDOM = 1;
-      INTERVALS_ASCENDING = 2;
-      INTERVALS_DESCENDING = 3;
-      LAPSES = 4;
-      ADDED = 5;
-      DUE = 6;
-      REVERSE_ADDED = 7;
-      DUE_PRIORITY = 8;
-    }
-
-    string search = 1;
-    uint32 limit = 2;
-    Order order = 3;
-  }
-
-  bool reschedule = 1;
-  repeated SearchTerm search_terms = 2;
-  // v1 scheduler only
-  repeated float delays = 3;
-  // v2 scheduler only
-  uint32 preview_delay = 4;
-}
 
 message NoteFieldConfig {
   bool sticky = 1;
   bool rtl = 2;

@@ -471,14 +409,77 @@ message CardRequirement {
 ///////////////////////////////////////////////////////////
 
 message Deck {
+  message Common {
+    bool study_collapsed = 1;
+    bool browser_collapsed = 2;
+
+    uint32 last_day_studied = 3;
+    int32 new_studied = 4;
+    int32 review_studied = 5;
+    int32 milliseconds_studied = 7;
+
+    // previously set in the v1 scheduler,
+    // but not currently used for anything
+    int32 learning_studied = 6;
+
+    reserved 8 to 13;
+
+    bytes other = 255;
+  }
+  message Normal {
+    int64 config_id = 1;
+    uint32 extend_new = 2;
+    uint32 extend_review = 3;
+    string description = 4;
+    bool markdown_description = 5;
+
+    reserved 6 to 11;
+  }
+  message Filtered {
+    message SearchTerm {
+      enum Order {
+        OLDEST_REVIEWED_FIRST = 0;
+        RANDOM = 1;
+        INTERVALS_ASCENDING = 2;
+        INTERVALS_DESCENDING = 3;
+        LAPSES = 4;
+        ADDED = 5;
+        DUE = 6;
+        REVERSE_ADDED = 7;
+        DUE_PRIORITY = 8;
+      }
+
+      string search = 1;
+      uint32 limit = 2;
+      Order order = 3;
+    }
+
+    bool reschedule = 1;
+    repeated SearchTerm search_terms = 2;
+    // v1 scheduler only
+    repeated float delays = 3;
+    // v2 scheduler only
+    uint32 preview_delay = 4;
+  }
+  // a container to store the deck specifics in the DB
+  // as a tagged enum
+  message KindContainer {
+    oneof kind {
+      Normal normal = 1;
+      Filtered filtered = 2;
+    }
+  }
+
   int64 id = 1;
   string name = 2;
   int64 mtime_secs = 3;
   int32 usn = 4;
-  DeckCommon common = 5;
+  Common common = 5;
   // the specifics are inlined here when sending data to clients,
   // as otherwise an extra level of indirection would be required
   oneof kind {
-    NormalDeck normal = 6;
-    FilteredDeck filtered = 7;
+    Normal normal = 6;
+    Filtered filtered = 7;
   }
 }
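
The comments in the hunk above describe the layout: the Normal/Filtered specifics are inlined into Deck's own oneof when sent to clients, while Deck.KindContainer exists only so the deck kind can be stored in its database column as a tagged enum (the storage hunks further down encode and decode a DeckKindContainer for exactly this). Below is a minimal sketch of that round trip; it is not code from this commit, and it uses hand-written prost stand-ins trimmed to one field each instead of the generated Deck types.

// Sketch only, not code from this commit: stand-ins for Deck.Normal,
// Deck.Filtered and Deck.KindContainer, assuming the prost crate.
use prost::Message;

#[derive(Clone, PartialEq, Message)]
pub struct Normal {
    #[prost(int64, tag = "1")]
    pub config_id: i64,
}

#[derive(Clone, PartialEq, Message)]
pub struct Filtered {
    #[prost(bool, tag = "1")]
    pub reschedule: bool,
}

// Mirrors deck::kind_container::Kind in the generated code.
#[derive(Clone, PartialEq, prost::Oneof)]
pub enum Kind {
    #[prost(message, tag = "1")]
    Normal(Normal),
    #[prost(message, tag = "2")]
    Filtered(Filtered),
}

// Mirrors Deck.KindContainer: only used when persisting the deck kind.
#[derive(Clone, PartialEq, Message)]
pub struct KindContainer {
    #[prost(oneof = "Kind", tags = "1, 2")]
    pub kind: Option<Kind>,
}

fn main() {
    // writing a deck: serialize the kind into the blob stored in the decks table
    let container = KindContainer {
        kind: Some(Kind::Normal(Normal { config_id: 1 })),
    };
    let mut blob = Vec::new();
    container.encode(&mut blob).unwrap();

    // reading it back: decode the blob and recover the tagged enum
    let round_trip = KindContainer::decode(blob.as_slice()).unwrap();
    assert_eq!(container, round_trip);
}

The real storage code below does the same thing with the generated DeckKindContainer type and the deck's actual kind field.
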
@@ -1524,7 +1525,7 @@ message RenameDeckIn {
 message FilteredDeckForUpdate {
   int64 id = 1;
   string name = 2;
-  FilteredDeck config = 3;
+  Deck.Filtered config = 3;
 }
 
 message SetFlagIn {

@@ -90,7 +90,7 @@ pub fn write_backend_proto_rs() {
         .out_dir(&out_dir)
         .service_generator(service_generator())
         .type_attribute(
-            "FilteredDeck.SearchTerm.Order",
+            "Deck.Filtered.SearchTerm.Order",
             "#[derive(strum::EnumIter)]",
         )
         .compile_protos(&[&backend_proto], &[&proto_dir, &out_dir])

@@ -3,11 +3,7 @@
 
 use strum::IntoEnumIterator;
 
-pub use crate::backend_proto::{
-    deck_kind::Kind as DeckKind, Deck as DeckProto, DeckCommon, DeckKind as DeckKindProto,
-    FilteredDeck, NormalDeck,
-};
-use crate::decks::{FilteredSearchOrder, FilteredSearchTerm};
+use super::{DeckCommon, DeckKind, FilteredDeck, FilteredSearchOrder, FilteredSearchTerm};
 use crate::prelude::*;
 
 impl Deck {

@@ -8,9 +8,11 @@ mod tree;
 pub(crate) mod undo;
 
 pub use crate::backend_proto::{
-    deck_kind::Kind as DeckKind,
-    filtered_deck::{search_term::Order as FilteredSearchOrder, SearchTerm as FilteredSearchTerm},
-    Deck as DeckProto, DeckCommon, DeckKind as DeckKindProto, FilteredDeck, NormalDeck,
+    deck::filtered::{search_term::Order as FilteredSearchOrder, SearchTerm as FilteredSearchTerm},
+    deck::kind_container::Kind as DeckKind,
+    deck::KindContainer as DeckKindContainer,
+    deck::{Common as DeckCommon, Filtered as FilteredDeck, Normal as NormalDeck},
+    Deck as DeckProto,
 };
 use crate::{
     backend_proto as pb,

@@ -7,7 +7,7 @@ use crate::{
    card::CardQueue,
    config::SchedulerVersion,
    decks::immediate_parent_name,
-    decks::{Deck, DeckCommon, DeckId, DeckKindProto, DeckSchema11, DueCounts},
+    decks::{Deck, DeckCommon, DeckId, DeckKindContainer, DeckSchema11, DueCounts},
    error::{AnkiError, DbErrorKind, Result},
    i18n::I18n,
    timestamp::TimestampMillis,

@@ -19,7 +19,7 @@ use unicase::UniCase;
 
 fn row_to_deck(row: &Row) -> Result<Deck> {
     let common = DeckCommon::decode(row.get_raw(4).as_blob()?)?;
-    let kind = DeckKindProto::decode(row.get_raw(5).as_blob()?)?;
+    let kind = DeckKindContainer::decode(row.get_raw(5).as_blob()?)?;
     let id = row.get(0)?;
     Ok(Deck {
         id,

@@ -117,7 +117,7 @@ impl SqliteStorage {
         let mut stmt = self.db.prepare_cached(include_str!("update_deck.sql"))?;
         let mut common = vec![];
         deck.common.encode(&mut common)?;
-        let kind_enum = DeckKindProto {
+        let kind_enum = DeckKindContainer {
             kind: Some(deck.kind.clone()),
         };
         let mut kind = vec![];

@@ -151,7 +151,7 @@ impl SqliteStorage {
             .prepare_cached(include_str!("add_or_update_deck.sql"))?;
         let mut common = vec![];
         deck.common.encode(&mut common)?;
-        let kind_enum = DeckKindProto {
+        let kind_enum = DeckKindContainer {
             kind: Some(deck.kind.clone()),
         };
         let mut kind = vec![];