Automate schema 11 other duplicates clearing (#2542)
* Skip linting target folder

  Contains build files not passing the copyright header check.

* Implicitly clear duplicate keys when serializing

  Fixes `originalStockKind` not being cleared from `other`, as it had mistakenly been added to the field list for `NoteFieldSchema11`. (A minimal sketch of the duplicate-key problem follows the change summary below.)
Parent: bac05039a7
Commit: 651ba88393
8 changed files with 28 additions and 67 deletions
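
The gist of the fix, as a minimal standalone sketch: the schema11 structs declare known fields and collect everything else into a flattened `other` map. If `other` happens to contain a key that the struct also declares, serializing straight to a string writes that key twice, while serializing to a `serde_json::Value` first collapses the duplicates, since a JSON object map holds one entry per key. `Conf`, its field, and the value below are illustrative stand-ins rather than Anki's actual schema11 types; only the dedup-by-round-tripping idea mirrors the commit's new `schema11_to_string()` helper.

```rust
// Hypothetical stand-in for a schema11 struct: one declared field plus a
// flattened catch-all map for keys this client doesn't know about.
use std::collections::HashMap;

use serde::Serialize;
use serde_json::Value;

#[derive(Serialize)]
struct Conf {
    #[serde(rename = "newMix")]
    new_mix: i32,
    #[serde(flatten)]
    other: HashMap<String, Value>,
}

fn main() -> serde_json::Result<()> {
    let mut other = HashMap::new();
    // A key synced down from a newer client that was stored in `other`, and
    // that this client version now also declares as a real field.
    other.insert("newMix".to_string(), Value::from(2));
    let conf = Conf { new_mix: 0, other };

    // Serializing straight to a string writes the declared field and then the
    // flattened entry, so "newMix" appears twice: {"newMix":0,"newMix":2}
    println!("{}", serde_json::to_string(&conf)?);

    // Round-tripping through a Value first collapses the duplicate, because a
    // JSON object map holds one entry per key - the same idea as the
    // schema11_to_string() helper added in this commit.
    println!("{}", serde_json::to_string(&serde_json::to_value(&conf)?)?);
    Ok(())
}
```

The storage hunks below switch deck, deck config, and notetype schema11 serialization from `serde_json::to_string` to that helper, which makes the per-type `clear_other_duplicates` functions unnecessary.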
```diff
@@ -48,7 +48,6 @@ pub struct DeckConfSchema11 {
 
     // 2021 scheduler options: these were not in schema 11, but we need to persist them
     // so the settings are not lost on upgrade/downgrade.
-    // NOTE: if adding new ones, make sure to update clear_other_duplicates()
     #[serde(default)]
     new_mix: i32,
     #[serde(default)]
@@ -335,7 +334,6 @@ impl From<DeckConfig> for DeckConfSchema11 {
             top_other = Default::default();
         } else {
             top_other = serde_json::from_slice(&c.inner.other).unwrap_or_default();
-            clear_other_duplicates(&mut top_other);
             if let Some(new) = top_other.remove("new") {
                 let val: HashMap<String, Value> = serde_json::from_value(new).unwrap_or_default();
                 new_other = val;
@@ -409,27 +407,6 @@ impl From<DeckConfig> for DeckConfSchema11 {
     }
 }
 
-fn clear_other_duplicates(top_other: &mut HashMap<String, Value>) {
-    // Older clients may have received keys from a newer client when
-    // syncing, which get bundled into `other`. If they then upgrade, then
-    // downgrade their collection to schema11, serde will serialize the
-    // new default keys, but then add them again from `other`, leading
-    // to the keys being duplicated in the resulting json - which older
-    // clients then can't read. So we need to strip out any new keys we
-    // add.
-    for key in &[
-        "newMix",
-        "newPerDayMinimum",
-        "interdayLearningMix",
-        "reviewOrder",
-        "newSortOrder",
-        "newGatherPriority",
-        "buryInterdayLearning",
-    ] {
-        top_other.remove(*key);
-    }
-}
-
 #[cfg(test)]
 mod test {
     use serde::de::IntoDeserializer;
```
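For context on the comments in the hunks above about persisting 2021-scheduler settings across upgrade/downgrade: the flattened `other` map is what keeps keys from newer clients alive in an older client's collection. Below is a hedged sketch of that round trip, using a made-up `ConfSchema11` stand-in rather than Anki's real struct; the `newGatherPriority` key is borrowed from the removed list above purely for illustration.

```rust
// Illustrative only: a made-up ConfSchema11 stand-in, not Anki's real struct.
use std::collections::HashMap;

use serde::{Deserialize, Serialize};
use serde_json::Value;

#[derive(Serialize, Deserialize)]
struct ConfSchema11 {
    name: String,
    // Keys this client doesn't recognise are collected here on deserialization
    // and written back out on serialization, so settings from newer clients
    // survive a round trip through an older one.
    #[serde(flatten)]
    other: HashMap<String, Value>,
}

fn main() -> serde_json::Result<()> {
    // JSON produced by a newer client; "newGatherPriority" is unknown here.
    let from_newer_client = r#"{"name":"Default","newGatherPriority":1}"#;
    let conf: ConfSchema11 = serde_json::from_str(from_newer_client)?;
    assert!(conf.other.contains_key("newGatherPriority"));
    // The unknown key is preserved when the config is written back out.
    println!("{}", serde_json::to_string(&conf)?);
    Ok(())
}
```

It is precisely because such keys survive in `other` that a client which later declares them as real fields can emit them twice on downgrade, which is what the rest of this commit addresses.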
```diff
@@ -380,12 +380,6 @@ impl From<Deck> for DeckSchema11 {
 
 impl From<Deck> for DeckCommonSchema11 {
     fn from(deck: Deck) -> Self {
-        let mut other: HashMap<String, Value> = if deck.common.other.is_empty() {
-            Default::default()
-        } else {
-            serde_json::from_slice(&deck.common.other).unwrap_or_default()
-        };
-        clear_other_duplicates(&mut other);
         DeckCommonSchema11 {
             id: deck.id,
             mtime: deck.mtime_secs,
@@ -403,23 +397,11 @@ impl From<Deck> for DeckCommonSchema11 {
                 DeckKind::Normal(n) => n.description,
                 DeckKind::Filtered(_) => String::new(),
             },
-            other,
+            other: serde_json::from_slice(&deck.common.other).unwrap_or_default(),
         }
     }
 }
 
-/// See [crate::deckconfig::schema11::clear_other_duplicates()].
-fn clear_other_duplicates(other: &mut HashMap<String, Value>) {
-    for key in [
-        "reviewLimit",
-        "newLimit",
-        "reviewLimitToday",
-        "newLimitToday",
-    ] {
-        other.remove(key);
-    }
-}
-
 impl From<&Deck> for DeckTodaySchema11 {
     fn from(deck: &Deck) -> Self {
         let day = deck.common.last_day_studied as i32;
```
```diff
@@ -170,19 +170,6 @@ impl From<Notetype> for NotetypeSchema11 {
     }
 }
 
-/// See [crate::deckconfig::schema11::clear_other_duplicates()].
-fn clear_other_field_duplicates(other: &mut HashMap<String, Value>) {
-    for key in &[
-        "description",
-        "plainText",
-        "collapsed",
-        "excludeFromSearch",
-        "originalStockKind",
-    ] {
-        other.remove(*key);
-    }
-}
-
 impl From<CardRequirementSchema11> for CardRequirement {
     fn from(r: CardRequirementSchema11) -> Self {
         CardRequirement {
@@ -225,7 +212,6 @@ pub struct NoteFieldSchema11 {
 
     // This was not in schema 11, but needs to be listed here so that the setting is not lost
     // on downgrade/upgrade.
-    // NOTE: if adding new ones, make sure to update clear_other_field_duplicates()
     #[serde(default, deserialize_with = "default_on_invalid")]
     pub(crate) description: String,
 
@@ -285,8 +271,6 @@ impl From<NoteFieldSchema11> for NoteField {
 impl From<NoteField> for NoteFieldSchema11 {
     fn from(p: NoteField) -> Self {
         let conf = p.config;
-        let mut other = bytes_to_other(&conf.other);
-        clear_other_field_duplicates(&mut other);
         NoteFieldSchema11 {
             name: p.name,
             ord: p.ord.map(|o| o as u16),
@@ -298,7 +282,7 @@ impl From<NoteField> for NoteFieldSchema11 {
             description: conf.description,
             collapsed: conf.collapsed,
             exclude_from_search: conf.exclude_from_search,
-            other,
+            other: bytes_to_other(&conf.other),
         }
     }
 }
```
```diff
@@ -7,7 +7,24 @@ pub(crate) use serde_aux::field_attributes::deserialize_bool_from_anything;
 pub(crate) use serde_aux::field_attributes::deserialize_number_from_string;
 use serde_json::Value;
 
-use crate::timestamp::TimestampSecs;
+use crate::prelude::*;
 
+/// Serializes the value to JSON, removing any duplicate keys in the process.
+///
+/// This function solves a very specific problem when (de)serializing structs on
+/// up-/downgrade:
+/// Older clients may have received keys from a newer client when
+/// syncing, which get bundled into `other`. If they then upgrade, then
+/// downgrade their collection to schema11, serde will serialize the
+/// new default keys, but then add them again from `other`, leading
+/// to the keys being duplicated in the resulting json - which older
+/// clients then can't read. So we need to strip out any new keys we
+/// add.
+pub(crate) fn schema11_to_string(value: impl serde::Serialize) -> Result<String> {
+    serde_json::to_value(value)
+        .and_then(|val| serde_json::to_string(&val))
+        .map_err(Into::into)
+}
+
 /// Note: if you wish to cover the case where a field is missing, make sure you
 /// also use the `serde(default)` flag.
```
```diff
@@ -404,7 +404,7 @@ impl SqliteStorage {
 
     pub(crate) fn downgrade_decks_from_schema15(&self) -> Result<()> {
         let decks = self.get_all_decks_as_schema11()?;
-        self.set_schema11_decks(decks)
+        self.set_schema11_decks(&decks)
     }
 
     fn get_schema11_decks(&self) -> Result<HashMap<DeckId, DeckSchema11>> {
@@ -420,8 +420,8 @@ impl SqliteStorage {
         Ok(decks)
     }
 
-    pub(crate) fn set_schema11_decks(&self, decks: HashMap<DeckId, DeckSchema11>) -> Result<()> {
-        let json = serde_json::to_string(&decks)?;
+    pub(crate) fn set_schema11_decks(&self, decks: &HashMap<DeckId, DeckSchema11>) -> Result<()> {
+        let json = crate::serde::schema11_to_string(decks)?;
         self.db.execute("update col set decks = ?", [json])?;
         Ok(())
     }
```
```diff
@@ -254,7 +254,7 @@ impl SqliteStorage {
             .collect();
         self.db.execute(
             "update col set dconf=?",
-            params![serde_json::to_string(&confmap)?],
+            params![crate::serde::schema11_to_string(confmap)?],
         )?;
         Ok(())
     }
```
```diff
@@ -361,7 +361,7 @@ impl SqliteStorage {
 
     pub(crate) fn downgrade_notetypes_from_schema15(&self) -> Result<()> {
         let nts = self.get_all_notetypes_as_schema11()?;
-        self.set_schema11_notetypes(nts)
+        self.set_schema11_notetypes(&nts)
     }
 
     fn get_schema11_notetypes(&self) -> Result<HashMap<NotetypeId, NotetypeSchema11>> {
@@ -379,9 +379,9 @@ impl SqliteStorage {
 
     pub(crate) fn set_schema11_notetypes(
        &self,
-        notetypes: HashMap<NotetypeId, NotetypeSchema11>,
+        notetypes: &HashMap<NotetypeId, NotetypeSchema11>,
     ) -> Result<()> {
-        let json = serde_json::to_string(&notetypes)?;
+        let json = crate::serde::schema11_to_string(notetypes)?;
         self.db.execute("update col set models = ?", [json])?;
         Ok(())
     }
```
```diff
@@ -36,6 +36,7 @@ const IGNORED_FOLDERS: &[&str] = &[
     "./qt/aqt/forms",
     "./tools/workspace-hack",
     "./qt/bundle/PyOxidizer",
+    "./target",
 ];
 
 fn main() -> Result<()> {
```