Automate schema 11 other duplicates clearing (#2542)

* Skip linting target folder

Contains build files not passing the copyright header check.

* Implicitly clear duplicate keys when serializing

Fixes `originalStockKind` not being cleared from `other`, as it had
mistakenly been added to the field list for `NoteFieldSchema11`.
This commit is contained in:
RumovZ 2023-06-12 03:14:14 +02:00 committed by GitHub
parent bac05039a7
commit 651ba88393
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 28 additions and 67 deletions

View file

@ -48,7 +48,6 @@ pub struct DeckConfSchema11 {
// 2021 scheduler options: these were not in schema 11, but we need to persist them
// so the settings are not lost on upgrade/downgrade.
// NOTE: if adding new ones, make sure to update clear_other_duplicates()
#[serde(default)]
new_mix: i32,
#[serde(default)]
@ -335,7 +334,6 @@ impl From<DeckConfig> for DeckConfSchema11 {
top_other = Default::default();
} else {
top_other = serde_json::from_slice(&c.inner.other).unwrap_or_default();
clear_other_duplicates(&mut top_other);
if let Some(new) = top_other.remove("new") {
let val: HashMap<String, Value> = serde_json::from_value(new).unwrap_or_default();
new_other = val;
@ -409,27 +407,6 @@ impl From<DeckConfig> for DeckConfSchema11 {
}
}
/// Strips 2021-scheduler option keys out of `top_other` before serializing.
///
/// Older clients may have received keys from a newer client when syncing,
/// which get bundled into `other`. If they then upgrade, then downgrade
/// their collection to schema11, serde will serialize the new default keys,
/// but then add them again from `other`, leading to the keys being
/// duplicated in the resulting json - which older clients then can't read.
/// So we need to strip out any new keys we add.
fn clear_other_duplicates(top_other: &mut HashMap<String, Value>) {
    // Keep in sync with the 2021-scheduler fields on DeckConfSchema11.
    const DUPLICATED_KEYS: &[&str] = &[
        "newMix",
        "newPerDayMinimum",
        "interdayLearningMix",
        "reviewOrder",
        "newSortOrder",
        "newGatherPriority",
        "buryInterdayLearning",
    ];
    for &key in DUPLICATED_KEYS {
        top_other.remove(key);
    }
}
#[cfg(test)]
mod test {
use serde::de::IntoDeserializer;

View file

@ -380,12 +380,6 @@ impl From<Deck> for DeckSchema11 {
impl From<Deck> for DeckCommonSchema11 {
fn from(deck: Deck) -> Self {
let mut other: HashMap<String, Value> = if deck.common.other.is_empty() {
Default::default()
} else {
serde_json::from_slice(&deck.common.other).unwrap_or_default()
};
clear_other_duplicates(&mut other);
DeckCommonSchema11 {
id: deck.id,
mtime: deck.mtime_secs,
@ -403,23 +397,11 @@ impl From<Deck> for DeckCommonSchema11 {
DeckKind::Normal(n) => n.description,
DeckKind::Filtered(_) => String::new(),
},
other,
other: serde_json::from_slice(&deck.common.other).unwrap_or_default(),
}
}
}
/// See [crate::deckconfig::schema11::clear_other_duplicates()].
///
/// Deck-level counterpart: drops keys that would otherwise appear twice
/// in the serialized schema11 json.
fn clear_other_duplicates(other: &mut HashMap<String, Value>) {
    ["reviewLimit", "newLimit", "reviewLimitToday", "newLimitToday"]
        .iter()
        .for_each(|key| {
            other.remove(*key);
        });
}
impl From<&Deck> for DeckTodaySchema11 {
fn from(deck: &Deck) -> Self {
let day = deck.common.last_day_studied as i32;

View file

@ -170,19 +170,6 @@ impl From<Notetype> for NotetypeSchema11 {
}
}
/// See [crate::deckconfig::schema11::clear_other_duplicates()].
///
/// Note-field counterpart: drops field-config keys that would otherwise
/// appear twice in the serialized schema11 json.
fn clear_other_field_duplicates(other: &mut HashMap<String, Value>) {
    const FIELD_KEYS: [&str; 5] = [
        "description",
        "plainText",
        "collapsed",
        "excludeFromSearch",
        "originalStockKind",
    ];
    // Keep only entries whose key is not in the duplicated-field list.
    other.retain(|key, _| !FIELD_KEYS.contains(&key.as_str()));
}
impl From<CardRequirementSchema11> for CardRequirement {
fn from(r: CardRequirementSchema11) -> Self {
CardRequirement {
@ -225,7 +212,6 @@ pub struct NoteFieldSchema11 {
// This was not in schema 11, but needs to be listed here so that the setting is not lost
// on downgrade/upgrade.
// NOTE: if adding new ones, make sure to update clear_other_field_duplicates()
#[serde(default, deserialize_with = "default_on_invalid")]
pub(crate) description: String,
@ -285,8 +271,6 @@ impl From<NoteFieldSchema11> for NoteField {
impl From<NoteField> for NoteFieldSchema11 {
fn from(p: NoteField) -> Self {
let conf = p.config;
let mut other = bytes_to_other(&conf.other);
clear_other_field_duplicates(&mut other);
NoteFieldSchema11 {
name: p.name,
ord: p.ord.map(|o| o as u16),
@ -298,7 +282,7 @@ impl From<NoteField> for NoteFieldSchema11 {
description: conf.description,
collapsed: conf.collapsed,
exclude_from_search: conf.exclude_from_search,
other,
other: bytes_to_other(&conf.other),
}
}
}

View file

@ -7,7 +7,24 @@ pub(crate) use serde_aux::field_attributes::deserialize_bool_from_anything;
pub(crate) use serde_aux::field_attributes::deserialize_number_from_string;
use serde_json::Value;
use crate::timestamp::TimestampSecs;
use crate::prelude::*;
/// Serializes the value to JSON, removing any duplicate keys in the process.
///
/// This function solves a very specific problem when (de)serializing structs on
/// up-/downgrade:
/// Older clients may have received keys from a newer client when
/// syncing, which get bundled into `other`. If they then upgrade, then
/// downgrade their collection to schema11, serde will serialize the
/// new default keys, but then add them again from `other`, leading
/// to the keys being duplicated in the resulting json - which older
/// clients then can't read. So we need to strip out any new keys we
/// add.
pub(crate) fn schema11_to_string(value: impl serde::Serialize) -> Result<String> {
    // Round-tripping through an intermediate `Value` collapses duplicates,
    // since a json map can hold each key only once.
    let deduplicated = match serde_json::to_value(value) {
        Ok(val) => val,
        Err(err) => return Err(err.into()),
    };
    serde_json::to_string(&deduplicated).map_err(Into::into)
}
/// Note: if you wish to cover the case where a field is missing, make sure you
/// also use the `serde(default)` flag.

View file

@ -404,7 +404,7 @@ impl SqliteStorage {
pub(crate) fn downgrade_decks_from_schema15(&self) -> Result<()> {
let decks = self.get_all_decks_as_schema11()?;
self.set_schema11_decks(decks)
self.set_schema11_decks(&decks)
}
fn get_schema11_decks(&self) -> Result<HashMap<DeckId, DeckSchema11>> {
@ -420,8 +420,8 @@ impl SqliteStorage {
Ok(decks)
}
pub(crate) fn set_schema11_decks(&self, decks: HashMap<DeckId, DeckSchema11>) -> Result<()> {
let json = serde_json::to_string(&decks)?;
pub(crate) fn set_schema11_decks(&self, decks: &HashMap<DeckId, DeckSchema11>) -> Result<()> {
let json = crate::serde::schema11_to_string(decks)?;
self.db.execute("update col set decks = ?", [json])?;
Ok(())
}

View file

@ -254,7 +254,7 @@ impl SqliteStorage {
.collect();
self.db.execute(
"update col set dconf=?",
params![serde_json::to_string(&confmap)?],
params![crate::serde::schema11_to_string(confmap)?],
)?;
Ok(())
}

View file

@ -361,7 +361,7 @@ impl SqliteStorage {
pub(crate) fn downgrade_notetypes_from_schema15(&self) -> Result<()> {
let nts = self.get_all_notetypes_as_schema11()?;
self.set_schema11_notetypes(nts)
self.set_schema11_notetypes(&nts)
}
fn get_schema11_notetypes(&self) -> Result<HashMap<NotetypeId, NotetypeSchema11>> {
@ -379,9 +379,9 @@ impl SqliteStorage {
pub(crate) fn set_schema11_notetypes(
&self,
notetypes: HashMap<NotetypeId, NotetypeSchema11>,
notetypes: &HashMap<NotetypeId, NotetypeSchema11>,
) -> Result<()> {
let json = serde_json::to_string(&notetypes)?;
let json = crate::serde::schema11_to_string(notetypes)?;
self.db.execute("update col set models = ?", [json])?;
Ok(())
}

View file

@ -36,6 +36,7 @@ const IGNORED_FOLDERS: &[&str] = &[
"./qt/aqt/forms",
"./tools/workspace-hack",
"./qt/bundle/PyOxidizer",
"./target",
];
fn main() -> Result<()> {