mirror of
https://github.com/ankitects/anki.git
synced 2025-09-24 16:56:36 -04:00
add some timeouts
I'm waiting to hear back from the reqwest author on the best way to handle this. For now, this change just adds short timeouts to the quick requests, and will wait for up to an hour when sending/receiving files to allow for the slow connection + large file case.
This commit is contained in:
parent
c9da4bc1a6
commit
6495240914
1 changed file with 19 additions and 14 deletions
|
@ -20,6 +20,7 @@ use std::collections::HashMap;
|
||||||
use std::io::{Read, Write};
|
use std::io::{Read, Write};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::{io, time};
|
use std::{io, time};
|
||||||
|
use time::Duration;
|
||||||
|
|
||||||
static SYNC_MAX_FILES: usize = 25;
|
static SYNC_MAX_FILES: usize = 25;
|
||||||
static SYNC_MAX_BYTES: usize = (2.5 * 1024.0 * 1024.0) as usize;
|
static SYNC_MAX_BYTES: usize = (2.5 * 1024.0 * 1024.0) as usize;
|
||||||
|
@ -143,7 +144,8 @@ where
|
||||||
log: Logger,
|
log: Logger,
|
||||||
) -> MediaSyncer<'a, P> {
|
) -> MediaSyncer<'a, P> {
|
||||||
let client = Client::builder()
|
let client = Client::builder()
|
||||||
.connect_timeout(time::Duration::from_secs(30))
|
.connect_timeout(Duration::from_secs(30))
|
||||||
|
.timeout(Duration::from_secs(60))
|
||||||
.build()
|
.build()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let ctx = mgr.dbctx();
|
let ctx = mgr.dbctx();
|
||||||
|
@ -386,7 +388,7 @@ where
|
||||||
let local = self.ctx.count()?;
|
let local = self.ctx.count()?;
|
||||||
|
|
||||||
let obj = FinalizeRequest { local };
|
let obj = FinalizeRequest { local };
|
||||||
let resp = ankiweb_json_request(&self.client, &url, &obj, self.skey()).await?;
|
let resp = ankiweb_json_request(&self.client, &url, &obj, self.skey(), false).await?;
|
||||||
let resp: FinalizeResponse = resp.json().await?;
|
let resp: FinalizeResponse = resp.json().await?;
|
||||||
|
|
||||||
if let Some(data) = resp.data {
|
if let Some(data) = resp.data {
|
||||||
|
@ -425,7 +427,7 @@ where
|
||||||
let url = format!("{}mediaChanges", self.endpoint);
|
let url = format!("{}mediaChanges", self.endpoint);
|
||||||
|
|
||||||
let req = RecordBatchRequest { last_usn };
|
let req = RecordBatchRequest { last_usn };
|
||||||
let resp = ankiweb_json_request(&self.client, &url, &req, self.skey()).await?;
|
let resp = ankiweb_json_request(&self.client, &url, &req, self.skey(), false).await?;
|
||||||
let res: RecordBatchResult = resp.json().await?;
|
let res: RecordBatchResult = resp.json().await?;
|
||||||
|
|
||||||
if let Some(batch) = res.data {
|
if let Some(batch) = res.data {
|
||||||
|
@ -441,14 +443,14 @@ where
|
||||||
debug!(self.log, "requesting files: {:?}", files);
|
debug!(self.log, "requesting files: {:?}", files);
|
||||||
|
|
||||||
let req = ZipRequest { files };
|
let req = ZipRequest { files };
|
||||||
let resp = ankiweb_json_request(&self.client, &url, &req, self.skey()).await?;
|
let resp = ankiweb_json_request(&self.client, &url, &req, self.skey(), true).await?;
|
||||||
resp.bytes().await.map_err(Into::into)
|
resp.bytes().await.map_err(Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn send_zip_data(&self, data: Vec<u8>) -> Result<UploadReply> {
|
async fn send_zip_data(&self, data: Vec<u8>) -> Result<UploadReply> {
|
||||||
let url = format!("{}uploadChanges", self.endpoint);
|
let url = format!("{}uploadChanges", self.endpoint);
|
||||||
|
|
||||||
let resp = ankiweb_bytes_request(&self.client, &url, data, self.skey()).await?;
|
let resp = ankiweb_bytes_request(&self.client, &url, data, self.skey(), true).await?;
|
||||||
let res: UploadResult = resp.json().await?;
|
let res: UploadResult = resp.json().await?;
|
||||||
|
|
||||||
if let Some(reply) = res.data {
|
if let Some(reply) = res.data {
|
||||||
|
@ -545,13 +547,14 @@ async fn ankiweb_json_request<T>(
|
||||||
url: &str,
|
url: &str,
|
||||||
json: &T,
|
json: &T,
|
||||||
skey: &str,
|
skey: &str,
|
||||||
|
timeout_long: bool,
|
||||||
) -> Result<Response>
|
) -> Result<Response>
|
||||||
where
|
where
|
||||||
T: serde::Serialize,
|
T: serde::Serialize,
|
||||||
{
|
{
|
||||||
let req_json = serde_json::to_string(json)?;
|
let req_json = serde_json::to_string(json)?;
|
||||||
let part = multipart::Part::text(req_json);
|
let part = multipart::Part::text(req_json);
|
||||||
ankiweb_request(client, url, part, skey).await
|
ankiweb_request(client, url, part, skey, timeout_long).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn ankiweb_bytes_request(
|
async fn ankiweb_bytes_request(
|
||||||
|
@ -559,9 +562,10 @@ async fn ankiweb_bytes_request(
|
||||||
url: &str,
|
url: &str,
|
||||||
bytes: Vec<u8>,
|
bytes: Vec<u8>,
|
||||||
skey: &str,
|
skey: &str,
|
||||||
|
timeout_long: bool,
|
||||||
) -> Result<Response> {
|
) -> Result<Response> {
|
||||||
let part = multipart::Part::bytes(bytes);
|
let part = multipart::Part::bytes(bytes);
|
||||||
ankiweb_request(client, url, part, skey).await
|
ankiweb_request(client, url, part, skey, timeout_long).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn ankiweb_request(
|
async fn ankiweb_request(
|
||||||
|
@ -569,6 +573,7 @@ async fn ankiweb_request(
|
||||||
url: &str,
|
url: &str,
|
||||||
data_part: multipart::Part,
|
data_part: multipart::Part,
|
||||||
skey: &str,
|
skey: &str,
|
||||||
|
timeout_long: bool,
|
||||||
) -> Result<Response> {
|
) -> Result<Response> {
|
||||||
let data_part = data_part.file_name("data");
|
let data_part = data_part.file_name("data");
|
||||||
|
|
||||||
|
@ -576,13 +581,13 @@ async fn ankiweb_request(
|
||||||
.part("data", data_part)
|
.part("data", data_part)
|
||||||
.text("sk", skey.to_string());
|
.text("sk", skey.to_string());
|
||||||
|
|
||||||
client
|
let mut req = client.post(url).multipart(form);
|
||||||
.post(url)
|
|
||||||
.multipart(form)
|
if timeout_long {
|
||||||
.send()
|
req = req.timeout(Duration::from_secs(60 * 60));
|
||||||
.await?
|
}
|
||||||
.error_for_status()
|
|
||||||
.map_err(Into::into)
|
req.send().await?.error_for_status().map_err(Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract_into_media_folder(
|
fn extract_into_media_folder(
|
||||||
|
|
Loading…
Reference in a new issue