Mirror of https://github.com/ankitects/anki.git
Since DupeResolution is in CsvMetadata, we don't need to pass it separately
Follow-up to #1930
parent 67e4edcd8b
commit 5ea78e1c8e
9 changed files with 22 additions and 25 deletions
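To make the shape of the change concrete before the hunks: the DupeResolution enum now lives inside CsvMetadata, so callers stop passing a separate dupe_resolution value and the import code reads it off the metadata instead. The following is a minimal, self-contained Rust sketch of that pattern, using toy stand-ins for the prost-generated types (the real CsvMetadata has many more fields; only the enum-stored-as-i32 field with an accessor is mirrored here, and the import function is simplified for illustration).

// Toy sketch (assumed, simplified types -- not the real generated code):
// the dupe-resolution enum is nested in the metadata message, stored as an
// i32 the way prost stores proto enums, and read back through an accessor
// instead of being passed as an extra argument.

#[derive(Clone, Copy, Debug, PartialEq)]
enum DupeResolution {
    Update = 0,
    Ignore = 1,
    Add = 2,
}

#[derive(Default)]
struct CsvMetadata {
    // prost stores enum fields as i32; 0 (= Update) is the proto default.
    dupe_resolution: i32,
}

impl CsvMetadata {
    // Accessor in the style of prost's generated `dupe_resolution()`.
    fn dupe_resolution(&self) -> DupeResolution {
        match self.dupe_resolution {
            1 => DupeResolution::Ignore,
            2 => DupeResolution::Add,
            _ => DupeResolution::Update,
        }
    }
}

// After this commit, the import path takes only the metadata and pulls the
// resolution out of it, rather than receiving it as a separate parameter.
fn import_csv(path: &str, metadata: CsvMetadata) {
    let dupe_resolution = metadata.dupe_resolution();
    println!("importing {path} with {dupe_resolution:?}");
}

fn main() {
    let metadata = CsvMetadata {
        dupe_resolution: DupeResolution::Ignore as i32,
    };
    import_csv("notes.csv", metadata);
}

The Rust hunks below make the same move in the actual code: the extra parameter disappears from import_csv, and ForeignData is filled from metadata.dupe_resolution().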
@@ -58,7 +58,7 @@ message ImportResponse {
   repeated Note missing_notetype = 6;
   repeated Note missing_deck = 7;
   repeated Note empty_first_field = 8;
-  ImportCsvRequest.DupeResolution dupe_resolution = 9;
+  CsvMetadata.DupeResolution dupe_resolution = 9;
   // Usually the sum of all queues, but may be lower if multiple duplicates
   // have been updated with the same note.
   uint32 found_notes = 10;
@@ -106,15 +106,8 @@ message MediaEntries {
 }
 
 message ImportCsvRequest {
-  enum DupeResolution {
-    UPDATE = 0;
-    IGNORE = 1;
-    ADD = 2;
-    // UPDATE_IF_NEWER = 3;
-  }
   string path = 1;
   CsvMetadata metadata = 2;
-  DupeResolution dupe_resolution = 3;
 }
 
 message CsvMetadataRequest {
@@ -127,6 +120,12 @@ message CsvMetadataRequest {
 // Column indices are 1-based to make working with them in TS easier, where
 // unset numerical fields default to 0.
 message CsvMetadata {
+  enum DupeResolution {
+    UPDATE = 0;
+    IGNORE = 1;
+    ADD = 2;
+    // UPDATE_IF_NEWER = 3;
+  }
   // Order roughly in ascending expected frequency in note text, because the
   // delimiter detection algorithm is stupidly picking the first one it
   // encounters.
@@ -168,7 +167,7 @@ message CsvMetadata {
   bool force_is_html = 12;
   repeated generic.StringList preview = 13;
   uint32 guid_column = 14;
-  ImportCsvRequest.DupeResolution dupe_resolution = 15;
+  DupeResolution dupe_resolution = 15;
 }
 
 message ExportCardCsvRequest {

@@ -35,8 +35,8 @@ BrowserColumns = search_pb2.BrowserColumns
 StripHtmlMode = card_rendering_pb2.StripHtmlRequest
 ImportLogWithChanges = import_export_pb2.ImportResponse
 ImportCsvRequest = import_export_pb2.ImportCsvRequest
-DupeResolution = ImportCsvRequest.DupeResolution
 CsvMetadata = import_export_pb2.CsvMetadata
+DupeResolution = CsvMetadata.DupeResolution
 Delimiter = import_export_pb2.CsvMetadata.Delimiter
 
 import copy

@@ -112,7 +112,7 @@ pub fn write_backend_proto_rs() {
             "#[derive(Copy, serde_derive::Deserialize, serde_derive::Serialize)]",
         )
         .type_attribute(
-            "ImportCsvRequest.DupeResolution",
+            "CsvMetadata.DupeResolution",
             "#[derive(serde_derive::Deserialize, serde_derive::Serialize)]",
         )
         .compile_protos(paths.as_slice(), &[proto_dir])

@@ -86,11 +86,9 @@ impl ImportExportService for Backend {
 
     fn import_csv(&self, input: pb::ImportCsvRequest) -> Result<pb::ImportResponse> {
         self.with_col(|col| {
-            let dupe_resolution = input.dupe_resolution();
             col.import_csv(
                 &input.path,
                 input.metadata.unwrap_or_default(),
-                dupe_resolution,
                 self.import_progress_fn(),
             )
         })

@@ -10,7 +10,7 @@ use crate::{
     import_export::{
         text::{
             csv::metadata::{CsvDeck, CsvMetadata, CsvNotetype, Delimiter},
-            DupeResolution, ForeignData, ForeignNote, NameOrId,
+            ForeignData, ForeignNote, NameOrId,
         },
         ImportProgress, NoteLog,
     },
@@ -22,7 +22,6 @@ impl Collection {
         &mut self,
         path: &str,
         metadata: CsvMetadata,
-        dupe_resolution: DupeResolution,
         progress_fn: impl 'static + FnMut(ImportProgress, bool) -> bool,
     ) -> Result<OpOutput<NoteLog>> {
         let file = File::open(path)?;
@@ -32,7 +31,7 @@ impl Collection {
         let notes = ctx.deserialize_csv(file, metadata.delimiter())?;
 
         ForeignData {
-            dupe_resolution,
+            dupe_resolution: metadata.dupe_resolution(),
             default_deck,
             default_notetype,
             notes,

@@ -12,8 +12,9 @@ use strum::IntoEnumIterator;
 
 use super::import::build_csv_reader;
 pub use crate::pb::import_export::{
-    csv_metadata::{Deck as CsvDeck, Delimiter, MappedNotetype, Notetype as CsvNotetype},
-    import_csv_request::DupeResolution,
+    csv_metadata::{
+        Deck as CsvDeck, Delimiter, DupeResolution, MappedNotetype, Notetype as CsvNotetype,
+    },
     CsvMetadata,
 };
 use crate::{

@@ -8,7 +8,7 @@ mod json;
 use serde_derive::{Deserialize, Serialize};
 
 use super::LogNote;
-use crate::pb::import_csv_request::DupeResolution;
+use crate::pb::csv_metadata::DupeResolution;
 
 #[derive(Debug, Clone, Default, Serialize, Deserialize)]
 #[serde(default)]

@@ -8,19 +8,19 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     import * as tr from "../lib/ftl";
     import { ImportExport } from "../lib/proto";
 
-    export let dupeResolution: ImportExport.ImportCsvRequest.DupeResolution;
+    export let dupeResolution: ImportExport.CsvMetadata.DupeResolution;
 
     const dupeResolutions = [
         {
-            value: ImportExport.ImportCsvRequest.DupeResolution.UPDATE,
+            value: ImportExport.CsvMetadata.DupeResolution.UPDATE,
             label: tr.importingUpdate(),
         },
         {
-            value: ImportExport.ImportCsvRequest.DupeResolution.ADD,
+            value: ImportExport.CsvMetadata.DupeResolution.ADD,
             label: tr.importingDuplicate(),
         },
         {
-            value: ImportExport.ImportCsvRequest.DupeResolution.IGNORE,
+            value: ImportExport.CsvMetadata.DupeResolution.IGNORE,
             label: tr.importingPreserve(),
         },
     ];

@@ -30,7 +30,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
     export let path: string;
     export let notetypeNameIds: Notetypes.NotetypeNameId[];
     export let deckNameIds: Decks.DeckNameId[];
-    export let dupeResolution: ImportExport.ImportCsvRequest.DupeResolution;
+    export let dupeResolution: ImportExport.CsvMetadata.DupeResolution;
 
     export let delimiter: ImportExport.CsvMetadata.Delimiter;
     export let forceDelimiter: boolean;
@@ -75,8 +75,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
         await importExport.importCsv(
             ImportExport.ImportCsvRequest.create({
                 path,
-                dupeResolution,
                 metadata: ImportExport.CsvMetadata.create({
+                    dupeResolution,
                     delimiter,
                     forceDelimiter,
                     isHtml,