Fix most of clippy warnings (#914)

* Fix clippy: cast_lossless

* Fix clippy: match_ref_pats

* Fix clippy: extra_unused_lifetimes

* Fix clippy: needless_lifetimes

* Fix clippy: new_without_default

* Fix clippy: or_fun_call

* Fix clippy: should_implement_trait

* Fix clippy: redundant_closure

* Fix clippy: const_static_lifetime

* Fix clippy: redundant_pattern_matching

* Fix clippy: unused_io_amount

* Fix clippy: string_lit_as_bytes

* Fix clippy: needless_update

* Fix clippy: blacklisted_name

* Fix clippy: collapsible_if

* Fix clippy: match_wild_err_arm

* Fix clippy: single_match

* Fix clippy: useless_vec

* Fix clippy: single_char_pattern

* Fix clippy: float_cmp

* Fix clippy: approx_constant
This commit is contained in:
lzutao 2019-05-07 01:20:58 +07:00 committed by Dylan DPC
parent 345acb8597
commit ab7802a9a9
14 changed files with 85 additions and 74 deletions

View File

@ -28,12 +28,10 @@ fn main() {
if let Some(sub_args) = matches.subcommand_matches("supports") { if let Some(sub_args) = matches.subcommand_matches("supports") {
handle_supports(&preprocessor, sub_args); handle_supports(&preprocessor, sub_args);
} else { } else if let Err(e) = handle_preprocessing(&preprocessor) {
if let Err(e) = handle_preprocessing(&preprocessor) {
eprintln!("{}", e); eprintln!("{}", e);
process::exit(1); process::exit(1);
} }
}
} }
fn handle_preprocessing(pre: &dyn Preprocessor) -> Result<(), Error> { fn handle_preprocessing(pre: &dyn Preprocessor) -> Result<(), Error> {

View File

@ -116,7 +116,7 @@ where
I: IntoIterator<Item = &'a mut BookItem>, I: IntoIterator<Item = &'a mut BookItem>,
{ {
for item in items { for item in items {
if let &mut BookItem::Chapter(ref mut ch) = item { if let BookItem::Chapter(ch) = item {
for_each_mut(func, &mut ch.sub_items); for_each_mut(func, &mut ch.sub_items);
} }
@ -301,7 +301,7 @@ mod tests {
use std::io::Write; use std::io::Write;
use tempfile::{Builder as TempFileBuilder, TempDir}; use tempfile::{Builder as TempFileBuilder, TempDir};
const DUMMY_SRC: &'static str = " const DUMMY_SRC: &str = "
# Dummy Chapter # Dummy Chapter
this is some dummy text. this is some dummy text.
@ -317,7 +317,7 @@ And here is some \
let chapter_path = temp.path().join("chapter_1.md"); let chapter_path = temp.path().join("chapter_1.md");
File::create(&chapter_path) File::create(&chapter_path)
.unwrap() .unwrap()
.write(DUMMY_SRC.as_bytes()) .write_all(DUMMY_SRC.as_bytes())
.unwrap(); .unwrap();
let link = Link::new("Chapter 1", chapter_path); let link = Link::new("Chapter 1", chapter_path);
@ -333,7 +333,7 @@ And here is some \
File::create(&second_path) File::create(&second_path)
.unwrap() .unwrap()
.write_all("Hello World!".as_bytes()) .write_all(b"Hello World!")
.unwrap(); .unwrap();
let mut second = Link::new("Nested Chapter 1", &second_path); let mut second = Link::new("Nested Chapter 1", &second_path);

View File

@ -16,6 +16,7 @@ pub use self::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem
use std::io::Write; use std::io::Write;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use std::string::ToString;
use tempfile::Builder as TempFileBuilder; use tempfile::Builder as TempFileBuilder;
use toml::Value; use toml::Value;
@ -346,7 +347,7 @@ impl MDBook {
fn determine_renderers(config: &Config) -> Vec<Box<Renderer>> { fn determine_renderers(config: &Config) -> Vec<Box<Renderer>> {
let mut renderers: Vec<Box<Renderer>> = Vec::new(); let mut renderers: Vec<Box<Renderer>> = Vec::new();
if let Some(output_table) = config.get("output").and_then(|o| o.as_table()) { if let Some(output_table) = config.get("output").and_then(Value::as_table) {
for (key, table) in output_table.iter() { for (key, table) in output_table.iter() {
// the "html" backend has its own Renderer // the "html" backend has its own Renderer
if key == "html" { if key == "html" {
@ -386,7 +387,7 @@ fn determine_preprocessors(config: &Config) -> Result<Vec<Box<Preprocessor>>> {
preprocessors.extend(default_preprocessors()); preprocessors.extend(default_preprocessors());
} }
if let Some(preprocessor_table) = config.get("preprocessor").and_then(|v| v.as_table()) { if let Some(preprocessor_table) = config.get("preprocessor").and_then(Value::as_table) {
for key in preprocessor_table.keys() { for key in preprocessor_table.keys() {
match key.as_ref() { match key.as_ref() {
"links" => preprocessors.push(Box::new(LinkPreprocessor::new())), "links" => preprocessors.push(Box::new(LinkPreprocessor::new())),
@ -405,8 +406,8 @@ fn determine_preprocessors(config: &Config) -> Result<Vec<Box<Preprocessor>>> {
fn interpret_custom_preprocessor(key: &str, table: &Value) -> Box<CmdPreprocessor> { fn interpret_custom_preprocessor(key: &str, table: &Value) -> Box<CmdPreprocessor> {
let command = table let command = table
.get("command") .get("command")
.and_then(|c| c.as_str()) .and_then(Value::as_str)
.map(|s| s.to_string()) .map(ToString::to_string)
.unwrap_or_else(|| format!("mdbook-{}", key)); .unwrap_or_else(|| format!("mdbook-{}", key));
Box::new(CmdPreprocessor::new(key.to_string(), command.to_string())) Box::new(CmdPreprocessor::new(key.to_string(), command.to_string()))
@ -417,8 +418,8 @@ fn interpret_custom_renderer(key: &str, table: &Value) -> Box<CmdRenderer> {
// prepended by "mdbook-" // prepended by "mdbook-"
let table_dot_command = table let table_dot_command = table
.get("command") .get("command")
.and_then(|c| c.as_str()) .and_then(Value::as_str)
.map(|s| s.to_string()); .map(ToString::to_string);
let command = table_dot_command.unwrap_or_else(|| format!("mdbook-{}", key)); let command = table_dot_command.unwrap_or_else(|| format!("mdbook-{}", key));
@ -443,7 +444,7 @@ fn preprocessor_should_run(preprocessor: &Preprocessor, renderer: &Renderer, cfg
if let Some(Value::Array(ref explicit_renderers)) = cfg.get(&key) { if let Some(Value::Array(ref explicit_renderers)) = cfg.get(&key) {
return explicit_renderers return explicit_renderers
.iter() .iter()
.filter_map(|val| val.as_str()) .filter_map(Value::as_str)
.any(|name| name == renderer_name); .any(|name| name == renderer_name);
} }
@ -453,6 +454,7 @@ fn preprocessor_should_run(preprocessor: &Preprocessor, renderer: &Renderer, cfg
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use std::str::FromStr;
use toml::value::{Table, Value}; use toml::value::{Table, Value};
#[test] #[test]
@ -570,9 +572,9 @@ mod tests {
let html = cfg let html = cfg
.get_preprocessor("links") .get_preprocessor("links")
.and_then(|links| links.get("renderers")) .and_then(|links| links.get("renderers"))
.and_then(|renderers| renderers.as_array()) .and_then(Value::as_array)
.and_then(|renderers| renderers.get(0)) .and_then(|renderers| renderers.get(0))
.and_then(|renderer| renderer.as_str()) .and_then(Value::as_str)
.unwrap(); .unwrap();
assert_eq!(html, "html"); assert_eq!(html, "html");
let html_renderer = HtmlHandlebars::default(); let html_renderer = HtmlHandlebars::default();

View File

@ -31,7 +31,7 @@ pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
pub fn execute(args: &ArgMatches) -> Result<()> { pub fn execute(args: &ArgMatches) -> Result<()> {
let library_paths: Vec<&str> = args let library_paths: Vec<&str> = args
.values_of("library-path") .values_of("library-path")
.map(|v| v.collect()) .map(std::iter::Iterator::collect)
.unwrap_or_default(); .unwrap_or_default();
let book_dir = get_book_dir(args); let book_dir = get_book_dir(args);
let mut book = MDBook::load(&book_dir)?; let mut book = MDBook::load(&book_dir)?;

View File

@ -13,6 +13,7 @@
//! # use mdbook::errors::*; //! # use mdbook::errors::*;
//! # extern crate toml; //! # extern crate toml;
//! use std::path::PathBuf; //! use std::path::PathBuf;
//! use std::str::FromStr;
//! use mdbook::Config; //! use mdbook::Config;
//! use toml::Value; //! use toml::Value;
//! //!
@ -56,6 +57,7 @@ use std::env;
use std::fs::File; use std::fs::File;
use std::io::Read; use std::io::Read;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::FromStr;
use toml::value::Table; use toml::value::Table;
use toml::{self, Value}; use toml::{self, Value};
use toml_query::delete::TomlValueDeleteExt; use toml_query::delete::TomlValueDeleteExt;
@ -75,12 +77,16 @@ pub struct Config {
rest: Value, rest: Value,
} }
impl Config { impl FromStr for Config {
type Err = Error;
/// Load a `Config` from some string. /// Load a `Config` from some string.
pub fn from_str(src: &str) -> Result<Config> { fn from_str(src: &str) -> Result<Self> {
toml::from_str(src).chain_err(|| Error::from("Invalid configuration file")) toml::from_str(src).chain_err(|| Error::from("Invalid configuration file"))
} }
}
impl Config {
/// Load the configuration file from disk. /// Load the configuration file from disk.
pub fn from_disk<P: AsRef<Path>>(config_file: P) -> Result<Config> { pub fn from_disk<P: AsRef<Path>>(config_file: P) -> Result<Config> {
let mut buffer = String::new(); let mut buffer = String::new();
@ -212,13 +218,13 @@ impl Config {
/// Get the table associated with a particular renderer. /// Get the table associated with a particular renderer.
pub fn get_renderer<I: AsRef<str>>(&self, index: I) -> Option<&Table> { pub fn get_renderer<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
let key = format!("output.{}", index.as_ref()); let key = format!("output.{}", index.as_ref());
self.get(&key).and_then(|v| v.as_table()) self.get(&key).and_then(Value::as_table)
} }
/// Get the table associated with a particular preprocessor. /// Get the table associated with a particular preprocessor.
pub fn get_preprocessor<I: AsRef<str>>(&self, index: I) -> Option<&Table> { pub fn get_preprocessor<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
let key = format!("preprocessor.{}", index.as_ref()); let key = format!("preprocessor.{}", index.as_ref());
self.get(&key).and_then(|v| v.as_table()) self.get(&key).and_then(Value::as_table)
} }
fn from_legacy(mut table: Value) -> Config { fn from_legacy(mut table: Value) -> Config {
@ -560,7 +566,7 @@ impl<'de, T> Updateable<'de> for T where T: Serialize + Deserialize<'de> {}
mod tests { mod tests {
use super::*; use super::*;
const COMPLEX_CONFIG: &'static str = r#" const COMPLEX_CONFIG: &str = r#"
[book] [book]
title = "Some Book" title = "Some Book"
authors = ["Michael-F-Bryan <michaelfbryan@gmail.com>"] authors = ["Michael-F-Bryan <michaelfbryan@gmail.com>"]
@ -601,7 +607,6 @@ mod tests {
description: Some(String::from("A completely useless book")), description: Some(String::from("A completely useless book")),
multilingual: true, multilingual: true,
src: PathBuf::from("source"), src: PathBuf::from("source"),
..Default::default()
}; };
let build_should_be = BuildConfig { let build_should_be = BuildConfig {
build_dir: PathBuf::from("outputs"), build_dir: PathBuf::from("outputs"),
@ -658,10 +663,10 @@ mod tests {
assert_eq!(got, should_be); assert_eq!(got, should_be);
let baz: Vec<bool> = cfg.get_deserialized("output.random.baz").unwrap(); let got_baz: Vec<bool> = cfg.get_deserialized("output.random.baz").unwrap();
let baz_should_be = vec![true, true, false]; let baz_should_be = vec![true, true, false];
assert_eq!(baz, baz_should_be); assert_eq!(got_baz, baz_should_be);
} }
#[test] #[test]
@ -753,7 +758,7 @@ mod tests {
for (src, should_be) in inputs { for (src, should_be) in inputs {
let got = parse_env(src); let got = parse_env(src);
let should_be = should_be.map(|s| s.to_string()); let should_be = should_be.map(ToString::to_string);
assert_eq!(got, should_be); assert_eq!(got, should_be);
} }
@ -783,6 +788,7 @@ mod tests {
} }
#[test] #[test]
#[allow(clippy::approx_constant)]
fn update_config_using_env_var_and_complex_value() { fn update_config_using_env_var_and_complex_value() {
let mut cfg = Config::default(); let mut cfg = Config::default();
let key = "foo-bar.baz"; let key = "foo-bar.baz";

View File

@ -8,6 +8,7 @@ use book::{Book, BookItem};
/// A preprocessor for converting file name `README.md` to `index.md` since /// A preprocessor for converting file name `README.md` to `index.md` since
/// `README.md` is the de facto index file in markdown-based documentation. /// `README.md` is the de facto index file in markdown-based documentation.
#[derive(Default)]
pub struct IndexPreprocessor; pub struct IndexPreprocessor;
impl IndexPreprocessor { impl IndexPreprocessor {
@ -45,7 +46,10 @@ impl Preprocessor for IndexPreprocessor {
fn warn_readme_name_conflict<P: AsRef<Path>>(readme_path: P, index_path: P) { fn warn_readme_name_conflict<P: AsRef<Path>>(readme_path: P, index_path: P) {
let file_name = readme_path.as_ref().file_name().unwrap_or_default(); let file_name = readme_path.as_ref().file_name().unwrap_or_default();
let parent_dir = index_path.as_ref().parent().unwrap_or(index_path.as_ref()); let parent_dir = index_path
.as_ref()
.parent()
.unwrap_or_else(|| index_path.as_ref());
warn!( warn!(
"It seems that there are both {:?} and index.md under \"{}\".", "It seems that there are both {:?} and index.md under \"{}\".",
file_name, file_name,
@ -67,7 +71,7 @@ fn is_readme_file<P: AsRef<Path>>(path: P) -> bool {
RE.is_match( RE.is_match(
path.as_ref() path.as_ref()
.file_stem() .file_stem()
.and_then(|s| s.to_str()) .and_then(std::ffi::OsStr::to_str)
.unwrap_or_default(), .unwrap_or_default(),
) )
} }

View File

@ -13,6 +13,7 @@ const MAX_LINK_NESTED_DEPTH: usize = 10;
/// A preprocessor for expanding the `{{# playpen}}` and `{{# include}}` /// A preprocessor for expanding the `{{# playpen}}` and `{{# include}}`
/// helpers in a chapter. /// helpers in a chapter.
#[derive(Default)]
pub struct LinkPreprocessor; pub struct LinkPreprocessor;
impl LinkPreprocessor { impl LinkPreprocessor {

View File

@ -611,7 +611,7 @@ fn partition_source(s: &str) -> (String, String) {
for line in s.lines() { for line in s.lines() {
let trimline = line.trim(); let trimline = line.trim();
let header = trimline.chars().all(|c| c.is_whitespace()) || trimline.starts_with("#!["); let header = trimline.chars().all(char::is_whitespace) || trimline.starts_with("#![");
if !header || after_header { if !header || after_header {
after_header = true; after_header = true;
after.push_str(line); after.push_str(line);

View File

@ -9,12 +9,9 @@ pub fn theme_option(
) -> Result<(), RenderError> { ) -> Result<(), RenderError> {
trace!("theme_option (handlebars helper)"); trace!("theme_option (handlebars helper)");
let param = h let param = h.param(0).and_then(|v| v.value().as_str()).ok_or_else(|| {
.param(0) RenderError::new("Param 0 with String type is required for theme_option helper.")
.and_then(|v| v.value().as_str()) })?;
.ok_or(RenderError::new(
"Param 0 with String type is required for theme_option helper.",
))?;
let theme_name = rc let theme_name = rc
.evaluate_absolute(ctx, "default_theme", true)? .evaluate_absolute(ctx, "default_theme", true)?

View File

@ -16,11 +16,11 @@ pub struct RenderToc {
impl HelperDef for RenderToc { impl HelperDef for RenderToc {
fn call<'reg: 'rc, 'rc>( fn call<'reg: 'rc, 'rc>(
&self, &self,
_h: &Helper, _h: &Helper<'reg, 'rc>,
_: &Handlebars, _r: &'reg Handlebars,
ctx: &Context, ctx: &'rc Context,
rc: &mut RenderContext, rc: &mut RenderContext<'reg>,
out: &mut Output, out: &mut dyn Output,
) -> Result<(), RenderError> { ) -> Result<(), RenderError> {
// get value from context data // get value from context data
// rc.get_path() is current json parent path, you should always use it like this // rc.get_path() is current json parent path, you should always use it like this

View File

@ -91,7 +91,7 @@ fn render_item(
let p = Parser::new_ext(&chapter.content, opts); let p = Parser::new_ext(&chapter.content, opts);
let mut in_header = false; let mut in_header = false;
let max_section_depth = search_config.heading_split_level as i32; let max_section_depth = i32::from(search_config.heading_split_level);
let mut section_id = None; let mut section_id = None;
let mut heading = String::new(); let mut heading = String::new();
let mut body = String::new(); let mut body = String::new();

View File

@ -1,4 +1,5 @@
use errors::*; use errors::*;
use std::convert::Into;
use std::fs::{self, File}; use std::fs::{self, File};
use std::io::{Read, Write}; use std::io::{Read, Write};
use std::path::{Component, Path, PathBuf}; use std::path::{Component, Path, PathBuf};
@ -28,7 +29,7 @@ pub fn normalize_path(path: &str) -> String {
pub fn write_file<P: AsRef<Path>>(build_dir: &Path, filename: P, content: &[u8]) -> Result<()> { pub fn write_file<P: AsRef<Path>>(build_dir: &Path, filename: P, content: &[u8]) -> Result<()> {
let path = build_dir.join(filename); let path = build_dir.join(filename);
create_file(&path)?.write_all(content).map_err(|e| e.into()) create_file(&path)?.write_all(content).map_err(Into::into)
} }
/// Takes a path and returns a path containing just enough `../` to point to /// Takes a path and returns a path containing just enough `../` to point to
@ -85,7 +86,7 @@ pub fn create_file(path: &Path) -> Result<File> {
fs::create_dir_all(p)?; fs::create_dir_all(p)?;
} }
File::create(path).map_err(|e| e.into()) File::create(path).map_err(Into::into)
} }
/// Removes all the content of a directory but not the directory itself /// Removes all the content of a directory but not the directory itself
@ -196,43 +197,44 @@ mod tests {
fn copy_files_except_ext_test() { fn copy_files_except_ext_test() {
let tmp = match tempfile::TempDir::new() { let tmp = match tempfile::TempDir::new() {
Ok(t) => t, Ok(t) => t,
Err(_) => panic!("Could not create a temp dir"), Err(e) => panic!("Could not create a temp dir: {}", e),
}; };
// Create a couple of files // Create a couple of files
if let Err(_) = fs::File::create(&tmp.path().join("file.txt")) { if let Err(err) = fs::File::create(&tmp.path().join("file.txt")) {
panic!("Could not create file.txt") panic!("Could not create file.txt: {}", err);
} }
if let Err(_) = fs::File::create(&tmp.path().join("file.md")) { if let Err(err) = fs::File::create(&tmp.path().join("file.md")) {
panic!("Could not create file.md") panic!("Could not create file.md: {}", err);
} }
if let Err(_) = fs::File::create(&tmp.path().join("file.png")) { if let Err(err) = fs::File::create(&tmp.path().join("file.png")) {
panic!("Could not create file.png") panic!("Could not create file.png: {}", err);
} }
if let Err(_) = fs::create_dir(&tmp.path().join("sub_dir")) { if let Err(err) = fs::create_dir(&tmp.path().join("sub_dir")) {
panic!("Could not create sub_dir") panic!("Could not create sub_dir: {}", err);
} }
if let Err(_) = fs::File::create(&tmp.path().join("sub_dir/file.png")) { if let Err(err) = fs::File::create(&tmp.path().join("sub_dir/file.png")) {
panic!("Could not create sub_dir/file.png") panic!("Could not create sub_dir/file.png: {}", err);
} }
if let Err(_) = fs::create_dir(&tmp.path().join("sub_dir_exists")) { if let Err(err) = fs::create_dir(&tmp.path().join("sub_dir_exists")) {
panic!("Could not create sub_dir_exists") panic!("Could not create sub_dir_exists: {}", err);
} }
if let Err(_) = fs::File::create(&tmp.path().join("sub_dir_exists/file.txt")) { if let Err(err) = fs::File::create(&tmp.path().join("sub_dir_exists/file.txt")) {
panic!("Could not create sub_dir_exists/file.txt") panic!("Could not create sub_dir_exists/file.txt: {}", err);
} }
// Create output dir // Create output dir
if let Err(_) = fs::create_dir(&tmp.path().join("output")) { if let Err(err) = fs::create_dir(&tmp.path().join("output")) {
panic!("Could not create output") panic!("Could not create output: {}", err);
} }
if let Err(_) = fs::create_dir(&tmp.path().join("output/sub_dir_exists")) { if let Err(err) = fs::create_dir(&tmp.path().join("output/sub_dir_exists")) {
panic!("Could not create output/sub_dir_exists") panic!("Could not create output/sub_dir_exists: {}", err);
} }
match copy_files_except_ext(&tmp.path(), &tmp.path().join("output"), true, &["md"]) { if let Err(e) =
Err(e) => panic!("Error while executing the function:\n{:?}", e), copy_files_except_ext(&tmp.path(), &tmp.path().join("output"), true, &["md"])
Ok(_) => {} {
panic!("Error while executing the function:\n{:?}", e);
} }
// Check if the correct files were created // Check if the correct files were created

View File

@ -14,7 +14,7 @@ use std::borrow::Cow;
pub use self::string::{take_lines, RangeArgument}; pub use self::string::{take_lines, RangeArgument};
/// Replaces multiple consecutive whitespace characters with a single space character. /// Replaces multiple consecutive whitespace characters with a single space character.
pub fn collapse_whitespace<'a>(text: &'a str) -> Cow<'a, str> { pub fn collapse_whitespace(text: &str) -> Cow<'_, str> {
lazy_static! { lazy_static! {
static ref RE: Regex = Regex::new(r"\s\s+").unwrap(); static ref RE: Regex = Regex::new(r"\s\s+").unwrap();
} }

View File

@ -22,15 +22,15 @@ use std::path::Path;
use tempfile::Builder as TempFileBuilder; use tempfile::Builder as TempFileBuilder;
use walkdir::{DirEntry, WalkDir}; use walkdir::{DirEntry, WalkDir};
const BOOK_ROOT: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/dummy_book"); const BOOK_ROOT: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/dummy_book");
const TOC_TOP_LEVEL: &[&'static str] = &[ const TOC_TOP_LEVEL: &[&str] = &[
"1. First Chapter", "1. First Chapter",
"2. Second Chapter", "2. Second Chapter",
"Conclusion", "Conclusion",
"Dummy Book", "Dummy Book",
"Introduction", "Introduction",
]; ];
const TOC_SECOND_LEVEL: &[&'static str] = &[ const TOC_SECOND_LEVEL: &[&str] = &[
"1.1. Nested Chapter", "1.1. Nested Chapter",
"1.2. Includes", "1.2. Includes",
"2.1. Nested Chapter", "2.1. Nested Chapter",
@ -187,7 +187,7 @@ fn chapter_files_were_rendered_to_html() {
let chapter_files = WalkDir::new(&src) let chapter_files = WalkDir::new(&src)
.into_iter() .into_iter()
.filter_entry(|entry| entry_ends_with(entry, ".md")) .filter_entry(|entry| entry_ends_with(entry, ".md"))
.filter_map(|entry| entry.ok()) .filter_map(std::result::Result::ok)
.map(|entry| entry.path().to_path_buf()) .map(|entry| entry.path().to_path_buf())
.filter(|path| path.file_name().and_then(OsStr::to_str) != Some("SUMMARY.md")); .filter(|path| path.file_name().and_then(OsStr::to_str) != Some("SUMMARY.md"));
@ -390,7 +390,7 @@ fn by_default_mdbook_use_index_preprocessor_to_convert_readme_to_index() {
"First README", "First README",
]; ];
assert_contains_strings(&first_index, &expected_strings); assert_contains_strings(&first_index, &expected_strings);
assert_doesnt_contain_strings(&first_index, &vec!["README.html"]); assert_doesnt_contain_strings(&first_index, &["README.html"]);
let second_index = temp.path().join("book").join("second").join("index.html"); let second_index = temp.path().join("book").join("second").join("index.html");
let unexpected_strings = vec!["Second README"]; let unexpected_strings = vec!["Second README"];
@ -428,11 +428,12 @@ mod search {
let index = root.join("book/searchindex.js"); let index = root.join("book/searchindex.js");
let index = file_to_string(index).unwrap(); let index = file_to_string(index).unwrap();
let index = index.trim_start_matches("window.search = "); let index = index.trim_start_matches("window.search = ");
let index = index.trim_end_matches(";"); let index = index.trim_end_matches(';');
serde_json::from_str(&index).unwrap() serde_json::from_str(&index).unwrap()
} }
#[test] #[test]
#[allow(clippy::float_cmp)]
fn book_creates_reasonable_search_index() { fn book_creates_reasonable_search_index() {
let temp = DummyBook::new().build().unwrap(); let temp = DummyBook::new().build().unwrap();
let md = MDBook::load(temp.path()).unwrap(); let md = MDBook::load(temp.path()).unwrap();