use separate integration test for links
If we run into issues with unreliable network connections in the future, we'll be able to mark the test as flaky so Bazel can retry it multiple times.
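A rough sketch of what that could look like, assuming we simply set Bazel's built-in flaky attribute on the new links target (this is an illustration, not part of this commit):

# Hypothetical follow-up, not part of this commit: with flaky = True,
# Bazel reruns a failing test (up to 3 attempts by default) and only
# reports failure if every attempt fails.
rust_test(
    name = "links",
    srcs = ["tests/links.rs"],
    flaky = True,
    tags = [
        "ci",
        "manual",
    ],
    deps = [
        ":anki",
        "//rslib/cargo:futures",
        "//rslib/cargo:itertools",
        "//rslib/cargo:linkcheck",
        "//rslib/cargo:reqwest",
        "//rslib/cargo:strum",
        "//rslib/cargo:tokio",
    ],
)

Alternatively, the retry count can be chosen per invocation, e.g. bazel test --flaky_test_attempts=3 //rslib:links.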
parent 56545db339
commit d73852f272

5 changed files with 109 additions and 93 deletions
.bazelrc
@@ -26,7 +26,7 @@ test --test_output=errors
 # don't add empty __init__.py files
 build --incompatible_default_to_explicit_init_py
 
-build:ci --show_timestamps --isatty=0 --color=yes --show_progress_rate_limit=5 --action_env=ANKI_CI=1
+build:ci --show_timestamps --isatty=0 --color=yes --show_progress_rate_limit=5
 build:opt -c opt
 
 # the TypeScript workers on Windows choke when deps are changed while they're
@@ -16,7 +16,7 @@ test -e /state/node_modules && mv /state/node_modules ts/
 $BAZEL build $BUILDARGS ...
 
 echo "+++ Running tests"
-$BAZEL test $BUILDARGS ...
+$BAZEL test $BUILDARGS ... //rslib:links
 
 echo "--- Building wheels"
 $BAZEL build dist
@@ -147,6 +147,24 @@ rust_test(
     ],
 )
 
+rust_test(
+    name = "links",
+    srcs = ["tests/links.rs"],
+    tags = [
+        "ci",
+        "manual",
+    ],
+    deps = [
+        ":anki",
+        "//rslib/cargo:futures",
+        "//rslib/cargo:itertools",
+        "//rslib/cargo:linkcheck",
+        "//rslib/cargo:reqwest",
+        "//rslib/cargo:strum",
+        "//rslib/cargo:tokio",
+    ],
+)
+
 rustfmt_test(
     name = "format_check",
     srcs = glob([
@@ -1,7 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use crate::backend_proto::links::help_page_link_request::HelpPage;
+pub use crate::backend_proto::links::help_page_link_request::HelpPage;
 
 static HELP_SITE: &str = "https://docs.ankiweb.net/";
 
@@ -10,7 +10,7 @@ impl HelpPage {
         format!("{}{}", HELP_SITE, self.to_link_suffix())
     }
 
-    fn to_link_suffix(self) -> &'static str {
+    pub fn to_link_suffix(self) -> &'static str {
         match self {
             HelpPage::NoteType => "getting-started.html#note-types",
             HelpPage::Browsing => "browsing.html",
@@ -32,92 +32,3 @@ impl HelpPage {
         }
     }
 }
-
-#[cfg(test)]
-mod test {
-    use std::{env, iter};
-
-    use futures::StreamExt;
-    use itertools::Itertools;
-    use linkcheck::{
-        validation::{check_web, Context, Reason},
-        BasicContext,
-    };
-    use reqwest::Url;
-    use strum::IntoEnumIterator;
-
-    use super::*;
-
-    /// Aggregates [`Outcome`]s by collecting the error messages of the invalid ones.
-    #[derive(Default)]
-    struct Outcomes(Vec<String>);
-
-    enum Outcome {
-        Valid,
-        Invalid(String),
-    }
-
-    #[tokio::test]
-    async fn check_links() {
-        if env::var("ANKI_CI").is_err() {
-            println!("Skip, ANKI_CI not set.");
-            return;
-        }
-
-        let ctx = BasicContext::default();
-        let result = futures::stream::iter(HelpPage::iter())
-            .map(|page| check_page(page, &ctx))
-            .buffer_unordered(ctx.concurrency())
-            .collect::<Outcomes>()
-            .await;
-        if !result.0.is_empty() {
-            panic!("{}", result.message());
-        }
-    }
-
-    async fn check_page(page: HelpPage, ctx: &BasicContext) -> Outcome {
-        let link = page.to_link();
-        match Url::parse(&link) {
-            Ok(url) => {
-                if url.as_str() == link {
-                    match check_web(&url, ctx).await {
-                        Ok(()) => Outcome::Valid,
-                        Err(Reason::Dom) => Outcome::Invalid(format!(
-                            "'#{}' not found on '{}{}'",
-                            url.fragment().unwrap(),
-                            url.domain().unwrap(),
-                            url.path(),
-                        )),
-                        Err(Reason::Web(err)) => Outcome::Invalid(err.to_string()),
-                        _ => unreachable!(),
-                    }
-                } else {
-                    Outcome::Invalid(format!(
-                        "'{}' is not a valid URL part",
-                        page.to_link_suffix(),
-                    ))
-                }
-            }
-            Err(err) => Outcome::Invalid(err.to_string()),
-        }
-    }
-
-    impl Extend<Outcome> for Outcomes {
-        fn extend<T: IntoIterator<Item = Outcome>>(&mut self, items: T) {
-            for outcome in items {
-                match outcome {
-                    Outcome::Valid => (),
-                    Outcome::Invalid(err) => self.0.push(err),
-                }
-            }
-        }
-    }
-
-    impl Outcomes {
-        fn message(&self) -> String {
-            iter::once("invalid links found:")
-                .chain(self.0.iter().map(String::as_str))
-                .join("\n - ")
-        }
-    }
-}
rslib/tests/links.rs (new file, 87 lines)
@@ -0,0 +1,87 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use anki::links::HelpPage;
+#[cfg(test)]
+mod test {
+    use std::iter;
+
+    use futures::StreamExt;
+    use itertools::Itertools;
+    use linkcheck::{
+        validation::{check_web, Context, Reason},
+        BasicContext,
+    };
+    use reqwest::Url;
+    use strum::IntoEnumIterator;
+
+    use super::*;
+
+    /// Aggregates [`Outcome`]s by collecting the error messages of the invalid ones.
+    #[derive(Default)]
+    struct Outcomes(Vec<String>);
+
+    enum Outcome {
+        Valid,
+        Invalid(String),
+    }
+
+    #[tokio::test]
+    async fn check_links() {
+        let ctx = BasicContext::default();
+        let result = futures::stream::iter(HelpPage::iter())
+            .map(|page| check_page(page, &ctx))
+            .buffer_unordered(ctx.concurrency())
+            .collect::<Outcomes>()
+            .await;
+        if !result.0.is_empty() {
+            panic!("{}", result.message());
+        }
+    }
+
+    async fn check_page(page: HelpPage, ctx: &BasicContext) -> Outcome {
+        let link = page.to_link();
+        match Url::parse(&link) {
+            Ok(url) => {
+                if url.as_str() == link {
+                    match check_web(&url, ctx).await {
+                        Ok(()) => Outcome::Valid,
+                        Err(Reason::Dom) => Outcome::Invalid(format!(
+                            "'#{}' not found on '{}{}'",
+                            url.fragment().unwrap(),
+                            url.domain().unwrap(),
+                            url.path(),
+                        )),
+                        Err(Reason::Web(err)) => Outcome::Invalid(err.to_string()),
+                        _ => unreachable!(),
+                    }
+                } else {
+                    Outcome::Invalid(format!(
+                        "'{}' is not a valid URL part",
+                        page.to_link_suffix(),
+                    ))
+                }
+            }
+            Err(err) => Outcome::Invalid(err.to_string()),
+        }
+    }
+
+    impl Extend<Outcome> for Outcomes {
+        fn extend<T: IntoIterator<Item = Outcome>>(&mut self, items: T) {
+            for outcome in items {
+                match outcome {
+                    Outcome::Valid => (),
+                    Outcome::Invalid(err) => self.0.push(err),
+                }
+            }
+        }
+    }
+
+    impl Outcomes {
+        fn message(&self) -> String {
+            iter::once("invalid links found:")
+                .chain(self.0.iter().map(String::as_str))
+                .join("\n - ")
+        }
+    }
+}