diff --git a/examples/nop-preprocessor.rs b/examples/nop-preprocessor.rs
index 374ee60f..8ee2c250 100644
--- a/examples/nop-preprocessor.rs
+++ b/examples/nop-preprocessor.rs
@@ -28,11 +28,9 @@ fn main() {
 
     if let Some(sub_args) = matches.subcommand_matches("supports") {
         handle_supports(&preprocessor, sub_args);
-    } else {
-        if let Err(e) = handle_preprocessing(&preprocessor) {
-            eprintln!("{}", e);
-            process::exit(1);
-        }
+    } else if let Err(e) = handle_preprocessing(&preprocessor) {
+        eprintln!("{}", e);
+        process::exit(1);
     }
 }
 
diff --git a/src/book/book.rs b/src/book/book.rs
index 62af2d92..c98c03c2 100644
--- a/src/book/book.rs
+++ b/src/book/book.rs
@@ -116,7 +116,7 @@ where
     I: IntoIterator<Item = &'a mut BookItem>,
 {
     for item in items {
-        if let &mut BookItem::Chapter(ref mut ch) = item {
+        if let BookItem::Chapter(ch) = item {
             for_each_mut(func, &mut ch.sub_items);
         }
 
@@ -301,7 +301,7 @@ mod tests {
     use std::io::Write;
     use tempfile::{Builder as TempFileBuilder, TempDir};
 
-    const DUMMY_SRC: &'static str = "
+    const DUMMY_SRC: &str = "
 # Dummy Chapter
 
 this is some dummy text.
@@ -317,7 +317,7 @@ And here is some \
         let chapter_path = temp.path().join("chapter_1.md");
         File::create(&chapter_path)
             .unwrap()
-            .write(DUMMY_SRC.as_bytes())
+            .write_all(DUMMY_SRC.as_bytes())
             .unwrap();
 
         let link = Link::new("Chapter 1", chapter_path);
@@ -333,7 +333,7 @@ And here is some \
 
         File::create(&second_path)
             .unwrap()
-            .write_all("Hello World!".as_bytes())
+            .write_all(b"Hello World!")
             .unwrap();
 
         let mut second = Link::new("Nested Chapter 1", &second_path);
diff --git a/src/book/mod.rs b/src/book/mod.rs
index 5002c353..41ae50d9 100644
--- a/src/book/mod.rs
+++ b/src/book/mod.rs
@@ -16,6 +16,7 @@ pub use self::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem
 use std::io::Write;
 use std::path::PathBuf;
 use std::process::Command;
+use std::string::ToString;
 use tempfile::Builder as TempFileBuilder;
 use toml::Value;
 
@@ -346,7 +347,7 @@ impl MDBook {
 fn determine_renderers(config: &Config) -> Vec<Box<Renderer>> {
     let mut renderers: Vec<Box<Renderer>> = Vec::new();
 
-    if let Some(output_table) = config.get("output").and_then(|o| o.as_table()) {
+    if let Some(output_table) = config.get("output").and_then(Value::as_table) {
         for (key, table) in output_table.iter() {
             // the "html" backend has its own Renderer
             if key == "html" {
@@ -386,7 +387,7 @@ fn determine_preprocessors(config: &Config) -> Result<Vec<Box<Preprocessor>>> {
         preprocessors.extend(default_preprocessors());
     }
 
-    if let Some(preprocessor_table) = config.get("preprocessor").and_then(|v| v.as_table()) {
+    if let Some(preprocessor_table) = config.get("preprocessor").and_then(Value::as_table) {
         for key in preprocessor_table.keys() {
             match key.as_ref() {
                 "links" => preprocessors.push(Box::new(LinkPreprocessor::new())),
@@ -405,8 +406,8 @@ fn interpret_custom_preprocessor(key: &str, table: &Value) -> Box<Preprocessor> {
     let command = table
         .get("command")
-        .and_then(|c| c.as_str())
-        .map(|s| s.to_string())
+        .and_then(Value::as_str)
+        .map(ToString::to_string)
         .unwrap_or_else(|| format!("mdbook-{}", key));
 
     Box::new(CmdPreprocessor::new(key.to_string(), command.to_string()))
 }
@@ -417,8 +418,8 @@ fn interpret_custom_renderer(key: &str, table: &Value) -> Box<Renderer> {
     // prepended by "mdbook-"
     let table_dot_command = table
         .get("command")
-        .and_then(|c| c.as_str())
-        .map(|s| s.to_string());
+        .and_then(Value::as_str)
+        .map(ToString::to_string);
 
     let command = table_dot_command.unwrap_or_else(|| format!("mdbook-{}", key));
 
@@ -443,7 +444,7 @@ fn preprocessor_should_run(preprocessor: &Preprocessor, renderer: &Renderer, cfg
     if let Some(Value::Array(ref explicit_renderers)) = cfg.get(&key) {
         return explicit_renderers
             .iter()
-            .filter_map(|val| val.as_str())
+            .filter_map(Value::as_str)
             .any(|name| name == renderer_name);
     }
 
@@ -453,6 +454,7 @@ fn preprocessor_should_run(preprocessor: &Preprocessor, renderer: &Renderer, cfg
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::str::FromStr;
     use toml::value::{Table, Value};
 
     #[test]
@@ -570,9 +572,9 @@ mod tests {
         let html = cfg
             .get_preprocessor("links")
             .and_then(|links| links.get("renderers"))
-            .and_then(|renderers| renderers.as_array())
+            .and_then(Value::as_array)
             .and_then(|renderers| renderers.get(0))
-            .and_then(|renderer| renderer.as_str())
+            .and_then(Value::as_str)
             .unwrap();
         assert_eq!(html, "html");
         let html_renderer = HtmlHandlebars::default();
diff --git a/src/cmd/test.rs b/src/cmd/test.rs
index 63e1910d..cad0ae14 100644
--- a/src/cmd/test.rs
+++ b/src/cmd/test.rs
@@ -31,7 +31,7 @@ pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
 pub fn execute(args: &ArgMatches) -> Result<()> {
     let library_paths: Vec<&str> = args
         .values_of("library-path")
-        .map(|v| v.collect())
+        .map(std::iter::Iterator::collect)
         .unwrap_or_default();
     let book_dir = get_book_dir(args);
     let mut book = MDBook::load(&book_dir)?;
diff --git a/src/config.rs b/src/config.rs
index c6b37094..8e20bd7e 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -13,6 +13,7 @@
 //! # use mdbook::errors::*;
 //! # extern crate toml;
 //! use std::path::PathBuf;
+//! use std::str::FromStr;
 //! use mdbook::Config;
 //! use toml::Value;
 //!
@@ -56,6 +57,7 @@ use std::env;
 use std::fs::File;
 use std::io::Read;
 use std::path::{Path, PathBuf};
+use std::str::FromStr;
 use toml::value::Table;
 use toml::{self, Value};
 use toml_query::delete::TomlValueDeleteExt;
@@ -75,12 +77,16 @@ pub struct Config {
     rest: Value,
 }
 
-impl Config {
+impl FromStr for Config {
+    type Err = Error;
+
     /// Load a `Config` from some string.
-    pub fn from_str(src: &str) -> Result<Config> {
+    fn from_str(src: &str) -> Result<Config> {
         toml::from_str(src).chain_err(|| Error::from("Invalid configuration file"))
     }
+}
 
+impl Config {
     /// Load the configuration file from disk.
     pub fn from_disk<P: AsRef<Path>>(config_file: P) -> Result<Config> {
         let mut buffer = String::new();
@@ -212,13 +218,13 @@ impl Config {
     /// Get the table associated with a particular renderer.
     pub fn get_renderer<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
         let key = format!("output.{}", index.as_ref());
-        self.get(&key).and_then(|v| v.as_table())
+        self.get(&key).and_then(Value::as_table)
     }
 
     /// Get the table associated with a particular preprocessor.
     pub fn get_preprocessor<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
         let key = format!("preprocessor.{}", index.as_ref());
-        self.get(&key).and_then(|v| v.as_table())
+        self.get(&key).and_then(Value::as_table)
     }
 
     fn from_legacy(mut table: Value) -> Config {
@@ -560,7 +566,7 @@ impl<'de, T> Updateable<'de> for T where T: Serialize + Deserialize<'de> {}
 mod tests {
     use super::*;
 
-    const COMPLEX_CONFIG: &'static str = r#"
+    const COMPLEX_CONFIG: &str = r#"
         [book]
         title = "Some Book"
         authors = ["Michael-F-Bryan <michaelfbryan@gmail.com>"]
@@ -601,7 +607,6 @@ mod tests {
             description: Some(String::from("A completely useless book")),
             multilingual: true,
             src: PathBuf::from("source"),
-            ..Default::default()
         };
         let build_should_be = BuildConfig {
             build_dir: PathBuf::from("outputs"),
@@ -658,10 +663,10 @@ mod tests {
 
         assert_eq!(got, should_be);
 
-        let baz: Vec<bool> = cfg.get_deserialized("output.random.baz").unwrap();
+        let got_baz: Vec<bool> = cfg.get_deserialized("output.random.baz").unwrap();
         let baz_should_be = vec![true, true, false];
 
-        assert_eq!(baz, baz_should_be);
+        assert_eq!(got_baz, baz_should_be);
     }
 
     #[test]
@@ -753,7 +758,7 @@ mod tests {
 
         for (src, should_be) in inputs {
             let got = parse_env(src);
-            let should_be = should_be.map(|s| s.to_string());
+            let should_be = should_be.map(ToString::to_string);
 
             assert_eq!(got, should_be);
         }
@@ -783,6 +788,7 @@ mod tests {
     }
 
     #[test]
+    #[allow(clippy::approx_constant)]
    fn update_config_using_env_var_and_complex_value() {
         let mut cfg = Config::default();
         let key = "foo-bar.baz";
diff --git a/src/preprocess/index.rs b/src/preprocess/index.rs
index a8818518..d8e00fc1 100644
--- a/src/preprocess/index.rs
+++ b/src/preprocess/index.rs
@@ -8,6 +8,7 @@ use book::{Book, BookItem};
 
 /// A preprocessor for converting file name `README.md` to `index.md` since
 /// `README.md` is the de facto index file in markdown-based documentation.
+#[derive(Default)]
 pub struct IndexPreprocessor;
 
 impl IndexPreprocessor {
@@ -45,7 +46,10 @@ impl Preprocessor for IndexPreprocessor {
 
 fn warn_readme_name_conflict<P: AsRef<Path>>(readme_path: P, index_path: P) {
     let file_name = readme_path.as_ref().file_name().unwrap_or_default();
-    let parent_dir = index_path.as_ref().parent().unwrap_or(index_path.as_ref());
+    let parent_dir = index_path
+        .as_ref()
+        .parent()
+        .unwrap_or_else(|| index_path.as_ref());
     warn!(
         "It seems that there are both {:?} and index.md under \"{}\".",
         file_name,
@@ -67,7 +71,7 @@ fn is_readme_file<P: AsRef<Path>>(path: P) -> bool {
     RE.is_match(
         path.as_ref()
             .file_stem()
-            .and_then(|s| s.to_str())
+            .and_then(std::ffi::OsStr::to_str)
             .unwrap_or_default(),
     )
 }
diff --git a/src/preprocess/links.rs b/src/preprocess/links.rs
index 2e3a88e3..d852f706 100644
--- a/src/preprocess/links.rs
+++ b/src/preprocess/links.rs
@@ -13,6 +13,7 @@ const MAX_LINK_NESTED_DEPTH: usize = 10;
 
 /// A preprocessor for expanding the `{{# playpen}}` and `{{# include}}`
 /// helpers in a chapter.
+#[derive(Default)]
 pub struct LinkPreprocessor;
 
 impl LinkPreprocessor {
diff --git a/src/renderer/html_handlebars/hbs_renderer.rs b/src/renderer/html_handlebars/hbs_renderer.rs
index 956b8125..74358fb0 100644
--- a/src/renderer/html_handlebars/hbs_renderer.rs
+++ b/src/renderer/html_handlebars/hbs_renderer.rs
@@ -611,7 +611,7 @@ fn partition_source(s: &str) -> (String, String) {
 
     for line in s.lines() {
         let trimline = line.trim();
-        let header = trimline.chars().all(|c| c.is_whitespace()) || trimline.starts_with("#![");
+        let header = trimline.chars().all(char::is_whitespace) || trimline.starts_with("#![");
         if !header || after_header {
             after_header = true;
             after.push_str(line);
diff --git a/src/renderer/html_handlebars/helpers/theme.rs b/src/renderer/html_handlebars/helpers/theme.rs
index 24c0dda1..44a4f85f 100644
--- a/src/renderer/html_handlebars/helpers/theme.rs
+++ b/src/renderer/html_handlebars/helpers/theme.rs
@@ -9,12 +9,9 @@ pub fn theme_option(
 ) -> Result<(), RenderError> {
     trace!("theme_option (handlebars helper)");
 
-    let param = h
-        .param(0)
-        .and_then(|v| v.value().as_str())
-        .ok_or(RenderError::new(
-            "Param 0 with String type is required for theme_option helper.",
-        ))?;
+    let param = h.param(0).and_then(|v| v.value().as_str()).ok_or_else(|| {
+        RenderError::new("Param 0 with String type is required for theme_option helper.")
+    })?;
 
     let theme_name = rc
         .evaluate_absolute(ctx, "default_theme", true)?
diff --git a/src/renderer/html_handlebars/helpers/toc.rs b/src/renderer/html_handlebars/helpers/toc.rs
index 190a3470..ae085e5f 100644
--- a/src/renderer/html_handlebars/helpers/toc.rs
+++ b/src/renderer/html_handlebars/helpers/toc.rs
@@ -16,11 +16,11 @@ pub struct RenderToc {
 impl HelperDef for RenderToc {
     fn call<'reg: 'rc, 'rc>(
         &self,
-        _h: &Helper,
-        _: &Handlebars,
-        ctx: &Context,
-        rc: &mut RenderContext,
-        out: &mut Output,
+        _h: &Helper<'reg, 'rc>,
+        _r: &'reg Handlebars,
+        ctx: &'rc Context,
+        rc: &mut RenderContext<'reg>,
+        out: &mut dyn Output,
     ) -> Result<(), RenderError> {
         // get value from context data
         // rc.get_path() is current json parent path, you should always use it like this
diff --git a/src/renderer/html_handlebars/search.rs b/src/renderer/html_handlebars/search.rs
index c27977e1..df7a698f 100644
--- a/src/renderer/html_handlebars/search.rs
+++ b/src/renderer/html_handlebars/search.rs
@@ -91,7 +91,7 @@ fn render_item(
     let p = Parser::new_ext(&chapter.content, opts);
 
     let mut in_header = false;
-    let max_section_depth = search_config.heading_split_level as i32;
+    let max_section_depth = i32::from(search_config.heading_split_level);
     let mut section_id = None;
     let mut heading = String::new();
     let mut body = String::new();
diff --git a/src/utils/fs.rs b/src/utils/fs.rs
index 4ac59127..4e0aeb7d 100644
--- a/src/utils/fs.rs
+++ b/src/utils/fs.rs
@@ -1,4 +1,5 @@
 use errors::*;
+use std::convert::Into;
 use std::fs::{self, File};
 use std::io::{Read, Write};
 use std::path::{Component, Path, PathBuf};
@@ -28,7 +29,7 @@ pub fn normalize_path(path: &str) -> String {
 pub fn write_file<P: AsRef<Path>>(build_dir: &Path, filename: P, content: &[u8]) -> Result<()> {
     let path = build_dir.join(filename);
 
-    create_file(&path)?.write_all(content).map_err(|e| e.into())
+    create_file(&path)?.write_all(content).map_err(Into::into)
 }
 
 /// Takes a path and returns a path containing just enough `../` to point to
@@ -85,7 +86,7 @@ pub fn create_file(path: &Path) -> Result<File> {
         fs::create_dir_all(p)?;
     }
 
-    File::create(path).map_err(|e| e.into())
+    File::create(path).map_err(Into::into)
 }
 
 /// Removes all the content of a directory but not the directory itself
@@ -196,43 +197,44 @@ mod tests {
     fn copy_files_except_ext_test() {
         let tmp = match tempfile::TempDir::new() {
             Ok(t) => t,
-            Err(_) => panic!("Could not create a temp dir"),
+            Err(e) => panic!("Could not create a temp dir: {}", e),
         };
 
         // Create a couple of files
-        if let Err(_) = fs::File::create(&tmp.path().join("file.txt")) {
-            panic!("Could not create file.txt")
+        if let Err(err) = fs::File::create(&tmp.path().join("file.txt")) {
+            panic!("Could not create file.txt: {}", err);
         }
-        if let Err(_) = fs::File::create(&tmp.path().join("file.md")) {
-            panic!("Could not create file.md")
+        if let Err(err) = fs::File::create(&tmp.path().join("file.md")) {
+            panic!("Could not create file.md: {}", err);
         }
-        if let Err(_) = fs::File::create(&tmp.path().join("file.png")) {
-            panic!("Could not create file.png")
+        if let Err(err) = fs::File::create(&tmp.path().join("file.png")) {
+            panic!("Could not create file.png: {}", err);
         }
-        if let Err(_) = fs::create_dir(&tmp.path().join("sub_dir")) {
-            panic!("Could not create sub_dir")
+        if let Err(err) = fs::create_dir(&tmp.path().join("sub_dir")) {
+            panic!("Could not create sub_dir: {}", err);
         }
-        if let Err(_) = fs::File::create(&tmp.path().join("sub_dir/file.png")) {
-            panic!("Could not create sub_dir/file.png")
+        if let Err(err) = fs::File::create(&tmp.path().join("sub_dir/file.png")) {
+            panic!("Could not create sub_dir/file.png: {}", err);
         }
-        if let Err(_) = fs::create_dir(&tmp.path().join("sub_dir_exists")) {
-            panic!("Could not create sub_dir_exists")
+        if let Err(err) = fs::create_dir(&tmp.path().join("sub_dir_exists")) {
+            panic!("Could not create sub_dir_exists: {}", err);
         }
-        if let Err(_) = fs::File::create(&tmp.path().join("sub_dir_exists/file.txt")) {
-            panic!("Could not create sub_dir_exists/file.txt")
+        if let Err(err) = fs::File::create(&tmp.path().join("sub_dir_exists/file.txt")) {
+            panic!("Could not create sub_dir_exists/file.txt: {}", err);
         }
 
         // Create output dir
-        if let Err(_) = fs::create_dir(&tmp.path().join("output")) {
-            panic!("Could not create output")
+        if let Err(err) = fs::create_dir(&tmp.path().join("output")) {
+            panic!("Could not create output: {}", err);
         }
-        if let Err(_) = fs::create_dir(&tmp.path().join("output/sub_dir_exists")) {
-            panic!("Could not create output/sub_dir_exists")
+        if let Err(err) = fs::create_dir(&tmp.path().join("output/sub_dir_exists")) {
+            panic!("Could not create output/sub_dir_exists: {}", err);
         }
 
-        match copy_files_except_ext(&tmp.path(), &tmp.path().join("output"), true, &["md"]) {
-            Err(e) => panic!("Error while executing the function:\n{:?}", e),
-            Ok(_) => {}
+        if let Err(e) =
+            copy_files_except_ext(&tmp.path(), &tmp.path().join("output"), true, &["md"])
+        {
+            panic!("Error while executing the function:\n{:?}", e);
         }
 
         // Check if the correct files where created
diff --git a/src/utils/mod.rs b/src/utils/mod.rs
index 2a6324f1..7c7aeb51 100644
--- a/src/utils/mod.rs
+++ b/src/utils/mod.rs
@@ -14,7 +14,7 @@ use std::borrow::Cow;
 pub use self::string::{take_lines, RangeArgument};
 
 /// Replaces multiple consecutive whitespace characters with a single space character.
-pub fn collapse_whitespace<'a>(text: &'a str) -> Cow<'a, str> {
+pub fn collapse_whitespace(text: &str) -> Cow<'_, str> {
     lazy_static! {
         static ref RE: Regex = Regex::new(r"\s\s+").unwrap();
     }
diff --git a/tests/rendered_output.rs b/tests/rendered_output.rs
index 2449498c..8766ed88 100644
--- a/tests/rendered_output.rs
+++ b/tests/rendered_output.rs
@@ -22,15 +22,15 @@ use std::path::Path;
 use tempfile::Builder as TempFileBuilder;
 use walkdir::{DirEntry, WalkDir};
 
-const BOOK_ROOT: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/dummy_book");
-const TOC_TOP_LEVEL: &[&'static str] = &[
+const BOOK_ROOT: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/dummy_book");
+const TOC_TOP_LEVEL: &[&str] = &[
     "1. First Chapter",
     "2. Second Chapter",
     "Conclusion",
     "Dummy Book",
     "Introduction",
 ];
-const TOC_SECOND_LEVEL: &[&'static str] = &[
+const TOC_SECOND_LEVEL: &[&str] = &[
     "1.1. Nested Chapter",
     "1.2. Includes",
     "2.1. Nested Chapter",
@@ -187,7 +187,7 @@ fn chapter_files_were_rendered_to_html() {
     let chapter_files = WalkDir::new(&src)
         .into_iter()
         .filter_entry(|entry| entry_ends_with(entry, ".md"))
-        .filter_map(|entry| entry.ok())
+        .filter_map(std::result::Result::ok)
         .map(|entry| entry.path().to_path_buf())
         .filter(|path| path.file_name().and_then(OsStr::to_str) != Some("SUMMARY.md"));
 
@@ -390,7 +390,7 @@ fn by_default_mdbook_use_index_preprocessor_to_convert_readme_to_index() {
         "First README",
     ];
     assert_contains_strings(&first_index, &expected_strings);
-    assert_doesnt_contain_strings(&first_index, &vec!["README.html"]);
+    assert_doesnt_contain_strings(&first_index, &["README.html"]);
 
     let second_index = temp.path().join("book").join("second").join("index.html");
     let unexpected_strings = vec!["Second README"];
@@ -428,11 +428,12 @@ mod search {
         let index = root.join("book/searchindex.js");
         let index = file_to_string(index).unwrap();
         let index = index.trim_start_matches("window.search = ");
-        let index = index.trim_end_matches(";");
+        let index = index.trim_end_matches(';');
         serde_json::from_str(&index).unwrap()
     }
 
     #[test]
+    #[allow(clippy::float_cmp)]
     fn book_creates_reasonable_search_index() {
         let temp = DummyBook::new().build().unwrap();
         let md = MDBook::load(temp.path()).unwrap();