Merge pull request #690 from mattico/rustfmt

Remove rustfmt.toml
Matt Ickstadt 2018-07-23 12:47:33 -05:00 committed by GitHub
commit 0ac36f2183
35 changed files with 428 additions and 419 deletions

View File

@ -24,7 +24,6 @@ mod execs {
}
}
error_chain!{
foreign_links {
Io(std::io::Error);
@ -32,23 +31,25 @@ error_chain!{
}
fn program_exists(program: &str) -> Result<()> {
execs::cmd(program).arg("-v")
.output()
.chain_err(|| format!("Please install '{}'!", program))?;
execs::cmd(program)
.arg("-v")
.output()
.chain_err(|| format!("Please install '{}'!", program))?;
Ok(())
}
fn npm_package_exists(package: &str) -> Result<()> {
let status = execs::cmd("npm").args(&["list", "-g"])
.arg(package)
.output();
let status = execs::cmd("npm")
.args(&["list", "-g"])
.arg(package)
.output();
match status {
Ok(ref out) if out.status.success() => Ok(()),
_ => {
bail!("Missing npm package '{0}' install with: 'npm -g install {0}'",
package)
}
_ => bail!(
"Missing npm package '{0}' install with: 'npm -g install {0}'",
package
),
}
}
@ -81,13 +82,14 @@ fn run() -> Result<()> {
let theme_dir = Path::new(&manifest_dir).join("src/theme/");
let stylus_dir = theme_dir.join("stylus/book.styl");
if !execs::cmd("stylus").arg(stylus_dir)
.arg("--out")
.arg(theme_dir)
.arg("--use")
.arg("nib")
.status()?
.success()
if !execs::cmd("stylus")
.arg(stylus_dir)
.arg("--out")
.arg(theme_dir)
.arg("--use")
.arg("nib")
.status()?
.success()
{
bail!("Stylus encountered an error");
}

View File

@ -3,15 +3,15 @@ extern crate mdbook;
extern crate pulldown_cmark;
extern crate pulldown_cmark_to_cmark;
use mdbook::errors::{Error, Result};
use mdbook::MDBook;
use mdbook::book::{Book, BookItem, Chapter};
use mdbook::errors::{Error, Result};
use mdbook::preprocess::{Preprocessor, PreprocessorContext};
use mdbook::MDBook;
use pulldown_cmark::{Event, Parser, Tag};
use pulldown_cmark_to_cmark::fmt::cmark;
use std::ffi::OsString;
use std::env::{args, args_os};
use std::ffi::OsString;
use std::process;
struct Deemphasize;

View File

@ -1 +0,0 @@
format_strings = true
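
Note: `format_strings` is the rustfmt option that allows string literals themselves to be rewrapped to fit `max_width`; with rustfmt.toml removed, the project falls back to rustfmt's default (`format_strings = false`), so long literals stay on one line and only the surrounding expression is reflowed. A rough sketch of the difference, using a made-up `println!` call (illustrative only; the exact wrapping depends on the rustfmt version and `max_width`):

```rust
fn main() {
    // Default settings (format_strings = false): rustfmt leaves the literal
    // intact, even if the line runs past max_width.
    println!("Missing npm package 'foo', install it with: 'npm -g install foo'");

    // With format_strings = true, rustfmt may rewrap the literal itself using
    // a backslash continuation, roughly like this:
    println!(
        "Missing npm package 'foo', install it with: 'npm -g install \
         foo'"
    );
}
```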

View File

@ -1,7 +1,7 @@
use std::path::PathBuf;
use clap::{App, ArgMatches, SubCommand};
use mdbook::MDBook;
use mdbook::errors::Result;
use mdbook::MDBook;
use std::path::PathBuf;
use {get_book_dir, open};
// Create clap subcommand arguments

View File

@ -1,9 +1,9 @@
use clap::{App, ArgMatches, SubCommand};
use get_book_dir;
use mdbook::errors::*;
use mdbook::MDBook;
use std::fs;
use std::path::PathBuf;
use clap::{App, ArgMatches, SubCommand};
use mdbook::MDBook;
use mdbook::errors::*;
use get_book_dir;
// Create clap subcommand arguments
pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {

View File

@ -1,11 +1,11 @@
use clap::{App, ArgMatches, SubCommand};
use get_book_dir;
use mdbook::config;
use mdbook::errors::Result;
use mdbook::MDBook;
use std::io;
use std::io::Write;
use std::process::Command;
use clap::{App, ArgMatches, SubCommand};
use mdbook::MDBook;
use mdbook::errors::Result;
use mdbook::config;
use get_book_dir;
// Create clap subcommand arguments
pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {

View File

@ -8,22 +8,22 @@ extern crate log;
extern crate mdbook;
extern crate open;
use chrono::Local;
use clap::{App, AppSettings, ArgMatches};
use env_logger::Builder;
use log::LevelFilter;
use mdbook::utils;
use std::env;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::io::Write;
use clap::{App, AppSettings, ArgMatches};
use chrono::Local;
use log::LevelFilter;
use env_logger::Builder;
use mdbook::utils;
use std::path::{Path, PathBuf};
pub mod build;
pub mod clean;
pub mod init;
pub mod test;
#[cfg(feature = "serve")]
pub mod serve;
pub mod test;
#[cfg(feature = "watch")]
pub mod watch;

View File

@ -2,16 +2,17 @@ extern crate iron;
extern crate staticfile;
extern crate ws;
use std;
use self::iron::{status, AfterMiddleware, Chain, Iron, IronError, IronResult, Request, Response,
Set};
use self::iron::{
status, AfterMiddleware, Chain, Iron, IronError, IronResult, Request, Response, Set,
};
use clap::{App, ArgMatches, SubCommand};
use mdbook::MDBook;
use mdbook::utils;
use mdbook::errors::*;
use {get_book_dir, open};
use mdbook::utils;
use mdbook::MDBook;
use std;
#[cfg(feature = "watch")]
use watch;
use {get_book_dir, open};
struct ErrorRecover;

View File

@ -1,22 +1,20 @@
use clap::{App, ArgMatches, SubCommand};
use mdbook::MDBook;
use mdbook::errors::Result;
use get_book_dir;
use mdbook::errors::Result;
use mdbook::MDBook;
// Create clap subcommand arguments
pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("test")
.about("Test that code samples compile")
.arg_from_usage(
"-L, --library-path [DIR]... 'directory to add to crate search path'",
)
.arg_from_usage("-L, --library-path [DIR]... 'directory to add to crate search path'")
}
// test command implementation
pub fn execute(args: &ArgMatches) -> Result<()> {
let library_paths: Vec<&str> = args.values_of("library-path")
.map(|v| v.collect())
.unwrap_or_default();
.map(|v| v.collect())
.unwrap_or_default();
let book_dir = get_book_dir(args);
let mut book = MDBook::load(&book_dir)?;

View File

@ -1,13 +1,13 @@
extern crate notify;
use std::path::Path;
use self::notify::Watcher;
use std::time::Duration;
use std::sync::mpsc::channel;
use clap::{App, ArgMatches, SubCommand};
use mdbook::MDBook;
use mdbook::utils;
use mdbook::errors::Result;
use mdbook::utils;
use mdbook::MDBook;
use std::path::Path;
use std::sync::mpsc::channel;
use std::time::Duration;
use {get_book_dir, open};
// Create clap subcommand arguments
@ -48,8 +48,8 @@ pub fn trigger_on_change<F>(book: &MDBook, closure: F)
where
F: Fn(&Path, &Path),
{
use self::notify::RecursiveMode::*;
use self::notify::DebouncedEvent::*;
use self::notify::RecursiveMode::*;
// Create a channel to receive the events.
let (tx, rx) = channel();

View File

@ -1,8 +1,8 @@
use std::fmt::{self, Display, Formatter};
use std::path::{Path, PathBuf};
use std::collections::VecDeque;
use std::fmt::{self, Display, Formatter};
use std::fs::{self, File};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use super::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem};
use config::BuildConfig;
@ -297,8 +297,8 @@ impl Display for Chapter {
#[cfg(test)]
mod tests {
use super::*;
use tempfile::{TempDir, Builder as TempFileBuilder};
use std::io::Write;
use tempfile::{Builder as TempFileBuilder, TempDir};
const DUMMY_SRC: &'static str = "
# Dummy Chapter
@ -404,14 +404,12 @@ And here is some \
..Default::default()
};
let should_be = Book {
sections: vec![
BookItem::Chapter(Chapter {
name: String::from("Chapter 1"),
content: String::from(DUMMY_SRC),
path: PathBuf::from("chapter_1.md"),
..Default::default()
}),
],
sections: vec![BookItem::Chapter(Chapter {
name: String::from("Chapter 1"),
content: String::from(DUMMY_SRC),
path: PathBuf::from("chapter_1.md"),
..Default::default()
})],
..Default::default()
};
@ -535,13 +533,11 @@ And here is some \
fn cant_load_chapters_with_an_empty_path() {
let (_, temp) = dummy_link();
let summary = Summary {
numbered_chapters: vec![
SummaryItem::Link(Link {
name: String::from("Empty"),
location: PathBuf::from(""),
..Default::default()
}),
],
numbered_chapters: vec![SummaryItem::Link(Link {
name: String::from("Empty"),
location: PathBuf::from(""),
..Default::default()
})],
..Default::default()
};
@ -556,13 +552,11 @@ And here is some \
fs::create_dir(&dir).unwrap();
let summary = Summary {
numbered_chapters: vec![
SummaryItem::Link(Link {
name: String::from("nested"),
location: dir,
..Default::default()
}),
],
numbered_chapters: vec![SummaryItem::Link(Link {
name: String::from("nested"),
location: dir,
..Default::default()
})],
..Default::default()
};

View File

@ -1,12 +1,12 @@
use std::fs::{self, File};
use std::path::PathBuf;
use std::io::Write;
use std::path::PathBuf;
use toml;
use config::Config;
use super::MDBook;
use theme;
use config::Config;
use errors::*;
use theme;
/// A helper for setting up a new book and its directory structure.
#[derive(Debug, Clone, PartialEq)]

View File

@ -5,29 +5,24 @@
//!
//! [1]: ../index.html
mod summary;
mod book;
mod init;
mod summary;
pub use self::book::{load_book, Book, BookItem, BookItems, Chapter};
pub use self::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem};
pub use self::init::BookBuilder;
pub use self::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem};
use std::path::PathBuf;
use std::io::Write;
use std::path::PathBuf;
use std::process::Command;
use tempfile::Builder as TempFileBuilder;
use toml::Value;
use utils;
use renderer::{CmdRenderer, HtmlHandlebars, RenderContext, Renderer};
use preprocess::{
LinkPreprocessor,
IndexPreprocessor,
Preprocessor,
PreprocessorContext
};
use errors::*;
use preprocess::{IndexPreprocessor, LinkPreprocessor, Preprocessor, PreprocessorContext};
use renderer::{CmdRenderer, HtmlHandlebars, RenderContext, Renderer};
use utils;
use config::Config;

View File

@ -1,10 +1,10 @@
use errors::*;
use memchr::{self, Memchr};
use pulldown_cmark::{self, Event, Tag};
use std::fmt::{self, Display, Formatter};
use std::iter::FromIterator;
use std::ops::{Deref, DerefMut};
use std::path::{Path, PathBuf};
use memchr::{self, Memchr};
use pulldown_cmark::{self, Event, Tag};
use errors::*;
/// Parse the text from a `SUMMARY.md` file into a sort of "recipe" to be
/// used when loading a book from disk.
@ -164,33 +164,34 @@ struct SummaryParser<'a> {
/// use pattern matching and you won't get errors because `take_while()`
/// moves `$stream` out of self.
macro_rules! collect_events {
($stream:expr, start $delimiter:pat) => {
($stream:expr,start $delimiter:pat) => {
collect_events!($stream, Event::Start($delimiter))
};
($stream:expr, end $delimiter:pat) => {
($stream:expr,end $delimiter:pat) => {
collect_events!($stream, Event::End($delimiter))
};
($stream:expr, $delimiter:pat) => {
{
let mut events = Vec::new();
($stream:expr, $delimiter:pat) => {{
let mut events = Vec::new();
loop {
let event = $stream.next();
trace!("Next event: {:?}", event);
loop {
let event = $stream.next();
trace!("Next event: {:?}", event);
match event {
Some($delimiter) => break,
Some(other) => events.push(other),
None => {
debug!("Reached end of stream without finding the closing pattern, {}", stringify!($delimiter));
break;
}
match event {
Some($delimiter) => break,
Some(other) => events.push(other),
None => {
debug!(
"Reached end of stream without finding the closing pattern, {}",
stringify!($delimiter)
);
break;
}
}
events
}
}
events
}};
}
impl<'a> SummaryParser<'a> {
@ -659,14 +660,12 @@ mod tests {
name: String::from("First"),
location: PathBuf::from("./first.md"),
number: Some(SectionNumber(vec![1])),
nested_items: vec![
SummaryItem::Link(Link {
name: String::from("Nested"),
location: PathBuf::from("./nested.md"),
number: Some(SectionNumber(vec![1, 1])),
nested_items: Vec::new(),
}),
],
nested_items: vec![SummaryItem::Link(Link {
name: String::from("Nested"),
location: PathBuf::from("./nested.md"),
number: Some(SectionNumber(vec![1, 1])),
nested_items: Vec::new(),
})],
}),
SummaryItem::Link(Link {
name: String::from("Second"),

View File

@ -50,17 +50,17 @@
#![deny(missing_docs)]
use std::path::{Path, PathBuf};
use std::fs::File;
use std::io::Read;
use std::env;
use toml::{self, Value};
use toml::value::Table;
use toml_query::read::TomlValueReadExt;
use toml_query::insert::TomlValueInsertExt;
use toml_query::delete::TomlValueDeleteExt;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json;
use std::env;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use toml::value::Table;
use toml::{self, Value};
use toml_query::delete::TomlValueDeleteExt;
use toml_query::insert::TomlValueInsertExt;
use toml_query::read::TomlValueReadExt;
use errors::*;
@ -217,9 +217,10 @@ impl Config {
// figure out what try_into() deserializes to.
macro_rules! get_and_insert {
($table:expr, $key:expr => $out:expr) => {
let got = $table.as_table_mut()
.and_then(|t| t.remove($key))
.and_then(|v| v.try_into().ok());
let got = $table
.as_table_mut()
.and_then(|t| t.remove($key))
.and_then(|v| v.try_into().ok());
if let Some(value) = got {
$out = value;
}

View File

@ -107,17 +107,17 @@ extern crate toml_query;
#[macro_use]
extern crate pretty_assertions;
pub mod preprocess;
pub mod book;
pub mod config;
pub mod preprocess;
pub mod renderer;
pub mod theme;
pub mod utils;
pub use book::MDBook;
pub use book::BookItem;
pub use renderer::Renderer;
pub use book::MDBook;
pub use config::Config;
pub use renderer::Renderer;
/// The error types used through out this crate.
pub mod errors {

View File

@ -1,5 +1,5 @@
use std::path::Path;
use regex::Regex;
use std::path::Path;
use errors::*;
@ -27,8 +27,7 @@ impl Preprocessor for IndexPreprocessor {
book.for_each_mut(|section: &mut BookItem| {
if let BookItem::Chapter(ref mut ch) = *section {
if is_readme_file(&ch.path) {
let index_md = source_dir
.join(ch.path.with_file_name("index.md"));
let index_md = source_dir.join(ch.path.with_file_name("index.md"));
if index_md.exists() {
warn_readme_name_conflict(&ch.path, &index_md);
}
@ -45,8 +44,15 @@ impl Preprocessor for IndexPreprocessor {
fn warn_readme_name_conflict<P: AsRef<Path>>(readme_path: P, index_path: P) {
let file_name = readme_path.as_ref().file_name().unwrap_or_default();
let parent_dir = index_path.as_ref().parent().unwrap_or(index_path.as_ref());
warn!("It seems that there are both {:?} and index.md under \"{}\".", file_name, parent_dir.display());
warn!("mdbook converts {:?} into index.html by default. It may cause", file_name);
warn!(
"It seems that there are both {:?} and index.md under \"{}\".",
file_name,
parent_dir.display()
);
warn!(
"mdbook converts {:?} into index.html by default. It may cause",
file_name
);
warn!("unexpected behavior if putting both files under the same directory.");
warn!("To solve the warning, try to rearrange the book structure or disable");
warn!("\"index\" preprocessor to stop the conversion.");
@ -60,7 +66,7 @@ fn is_readme_file<P: AsRef<Path>>(path: P) -> bool {
path.as_ref()
.file_stem()
.and_then(|s| s.to_str())
.unwrap_or_default()
.unwrap_or_default(),
)
}

View File

@ -1,9 +1,9 @@
use errors::*;
use regex::{CaptureMatches, Captures, Regex};
use std::ops::{Range, RangeFrom, RangeFull, RangeTo};
use std::path::{Path, PathBuf};
use regex::{CaptureMatches, Captures, Regex};
use utils::fs::file_to_string;
use utils::take_lines;
use errors::*;
use super::{Preprocessor, PreprocessorContext};
use book::{Book, BookItem};
@ -62,12 +62,18 @@ fn replace_all<P: AsRef<Path>>(s: &str, path: P, source: &P, depth: usize) -> St
Ok(new_content) => {
if depth < MAX_LINK_NESTED_DEPTH {
if let Some(rel_path) = playpen.link.relative_path(path) {
replaced.push_str(&replace_all(&new_content, rel_path, &source.to_path_buf(), depth + 1));
replaced.push_str(&replace_all(
&new_content,
rel_path,
&source.to_path_buf(),
depth + 1,
));
}
}
else {
error!("Stack depth exceeded in {}. Check for cyclic includes",
source.display());
} else {
error!(
"Stack depth exceeded in {}. Check for cyclic includes",
source.display()
);
}
previous_end_index = playpen.end_index;
}
@ -103,7 +109,7 @@ impl<'a> LinkType<'a> {
LinkType::IncludeRangeFrom(p, _) => Some(return_relative_path(base, &p)),
LinkType::IncludeRangeTo(p, _) => Some(return_relative_path(base, &p)),
LinkType::IncludeRangeFull(p, _) => Some(return_relative_path(base, &p)),
LinkType::Playpen(p,_) => Some(return_relative_path(base, &p))
LinkType::Playpen(p, _) => Some(return_relative_path(base, &p)),
}
}
}
@ -241,15 +247,16 @@ fn find_links(contents: &str) -> LinkIter {
// lazily compute following regex
// r"\\\{\{#.*\}\}|\{\{#([a-zA-Z0-9]+)\s*([a-zA-Z0-9_.\-:/\\\s]+)\}\}")?;
lazy_static! {
static ref RE: Regex = Regex::new(r"(?x) # insignificant whitespace mode
\\\{\{\#.*\}\} # match escaped link
| # or
\{\{\s* # link opening parens and whitespace
\#([a-zA-Z0-9]+) # link type
\s+ # separating whitespace
([a-zA-Z0-9\s_.\-:/\\]+) # link target path and space separated properties
\s*\}\} # whitespace and link closing parens
").unwrap();
static ref RE: Regex = Regex::new(
r"(?x) # insignificant whitespace mode
\\\{\{\#.*\}\} # match escaped link
| # or
\{\{\s* # link opening parens and whitespace
\#([a-zA-Z0-9]+) # link type
\s+ # separating whitespace
([a-zA-Z0-9\s_.\-:/\\]+) # link target path and space separated properties
\s*\}\} # whitespace and link closing parens"
).unwrap();
}
LinkIter(RE.captures_iter(contents))
}
@ -319,14 +326,12 @@ mod tests {
println!("\nOUTPUT: {:?}\n", res);
assert_eq!(
res,
vec![
Link {
start_index: 22,
end_index: 48,
link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..20),
link_text: "{{#include file.rs:10:20}}",
},
]
vec![Link {
start_index: 22,
end_index: 48,
link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..20),
link_text: "{{#include file.rs:10:20}}",
}]
);
}
@ -337,14 +342,12 @@ mod tests {
println!("\nOUTPUT: {:?}\n", res);
assert_eq!(
res,
vec![
Link {
start_index: 22,
end_index: 45,
link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..10),
link_text: "{{#include file.rs:10}}",
},
]
vec![Link {
start_index: 22,
end_index: 45,
link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..10),
link_text: "{{#include file.rs:10}}",
}]
);
}
@ -355,14 +358,12 @@ mod tests {
println!("\nOUTPUT: {:?}\n", res);
assert_eq!(
res,
vec![
Link {
start_index: 22,
end_index: 46,
link: LinkType::IncludeRangeFrom(PathBuf::from("file.rs"), 9..),
link_text: "{{#include file.rs:10:}}",
},
]
vec![Link {
start_index: 22,
end_index: 46,
link: LinkType::IncludeRangeFrom(PathBuf::from("file.rs"), 9..),
link_text: "{{#include file.rs:10:}}",
}]
);
}
@ -373,14 +374,12 @@ mod tests {
println!("\nOUTPUT: {:?}\n", res);
assert_eq!(
res,
vec![
Link {
start_index: 22,
end_index: 46,
link: LinkType::IncludeRangeTo(PathBuf::from("file.rs"), ..20),
link_text: "{{#include file.rs::20}}",
},
]
vec![Link {
start_index: 22,
end_index: 46,
link: LinkType::IncludeRangeTo(PathBuf::from("file.rs"), ..20),
link_text: "{{#include file.rs::20}}",
}]
);
}
@ -391,14 +390,12 @@ mod tests {
println!("\nOUTPUT: {:?}\n", res);
assert_eq!(
res,
vec![
Link {
start_index: 22,
end_index: 44,
link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
link_text: "{{#include file.rs::}}",
},
]
vec![Link {
start_index: 22,
end_index: 44,
link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
link_text: "{{#include file.rs::}}",
}]
);
}
@ -409,14 +406,12 @@ mod tests {
println!("\nOUTPUT: {:?}\n", res);
assert_eq!(
res,
vec![
Link {
start_index: 22,
end_index: 42,
link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
link_text: "{{#include file.rs}}",
},
]
vec![Link {
start_index: 22,
end_index: 42,
link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
link_text: "{{#include file.rs}}",
}]
);
}
@ -429,14 +424,12 @@ mod tests {
assert_eq!(
res,
vec![
Link {
start_index: 38,
end_index: 68,
link: LinkType::Escaped,
link_text: "\\{{#playpen file.rs editable}}",
},
]
vec![Link {
start_index: 38,
end_index: 68,
link: LinkType::Escaped,
link_text: "\\{{#playpen file.rs editable}}",
}]
);
}

View File

@ -1,10 +1,10 @@
//! Book preprocessing.
pub use self::links::LinkPreprocessor;
pub use self::index::IndexPreprocessor;
pub use self::links::LinkPreprocessor;
mod links;
mod index;
mod links;
use book::Book;
use config::Config;

View File

@ -132,8 +132,11 @@ impl HtmlHandlebars {
) -> Result<()> {
use utils::fs::write_file;
write_file(destination, ".nojekyll",
b"This file makes sure that Github Pages doesn't process mdBook's output.")?;
write_file(
destination,
".nojekyll",
b"This file makes sure that Github Pages doesn't process mdBook's output.",
)?;
write_file(destination, "book.js", &theme.js)?;
write_file(destination, "book.css", &theme.css)?;
@ -450,7 +453,10 @@ fn make_data(
if cfg!(feature = "search") {
let search = search.unwrap_or_default();
data.insert("search_enabled".to_owned(), json!(search.enable));
data.insert("search_js".to_owned(), json!(search.enable && search.copy_js));
data.insert(
"search_js".to_owned(),
json!(search.enable && search.copy_js),
);
} else if search.is_some() {
warn!("mdBook compiled without search support, ignoring `output.html.search` table");
warn!(
@ -513,7 +519,7 @@ fn build_header_links(html: &str) -> String {
fn wrap_header_with_link(
level: usize,
content: &str,
id_counter: &mut HashMap<String, usize>
id_counter: &mut HashMap<String, usize>,
) -> String {
let raw_id = utils::id_from_content(content);
@ -534,7 +540,6 @@ fn wrap_header_with_link(
)
}
// The rust book uses annotations for rustdoc to test code snippets,
// like the following:
// ```rust,should_panic
@ -574,7 +579,8 @@ fn add_playpen_pre(html: &str, playpen_config: &Playpen) -> String {
{
// wrap the contents in an external pre block
if playpen_config.editable && classes.contains("editable")
|| text.contains("fn main") || text.contains("quick_main!")
|| text.contains("fn main")
|| text.contains("quick_main!")
{
format!("<pre class=\"playpen\">{}</pre>", text)
} else {

View File

@ -1,2 +1,2 @@
pub mod toc;
pub mod navigation;
pub mod toc;

View File

@ -1,8 +1,8 @@
use std::path::Path;
use std::collections::BTreeMap;
use std::path::Path;
use serde_json;
use handlebars::{Context, Handlebars, Helper, RenderContext, RenderError, Renderable};
use serde_json;
use utils;
@ -90,12 +90,14 @@ fn render(
let mut context = BTreeMap::new();
let base_path = rc.evaluate_absolute("path", false)?
.as_str()
.ok_or_else(|| RenderError::new("Type error for `path`, string expected"))?
.replace("\"", "");
.as_str()
.ok_or_else(|| RenderError::new("Type error for `path`, string expected"))?
.replace("\"", "");
context.insert("path_to_root".to_owned(),
json!(utils::fs::path_to_root(&base_path)));
context.insert(
"path_to_root".to_owned(),
json!(utils::fs::path_to_root(&base_path)),
);
chapter
.get("name")

View File

@ -1,11 +1,11 @@
use std::path::Path;
use std::collections::BTreeMap;
use std::path::Path;
use utils;
use serde_json;
use handlebars::{Handlebars, Helper, HelperDef, RenderContext, RenderError};
use pulldown_cmark::{html, Event, Parser, Tag};
use serde_json;
// Handlebars helper to construct TOC
#[derive(Clone, Copy)]
@ -79,7 +79,8 @@ impl HelperDef for RenderToc {
.replace("\\", "/");
// Add link
rc.writer.write_all(&utils::fs::path_to_root(&current).as_bytes())?;
rc.writer
.write_all(&utils::fs::path_to_root(&current).as_bytes())?;
rc.writer.write_all(tmp.as_bytes())?;
rc.writer.write_all(b"\"")?;

View File

@ -5,15 +5,15 @@ use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use self::elasticlunr::Index;
use pulldown_cmark::*;
use serde_json;
use self::elasticlunr::Index;
use book::{Book, BookItem};
use config::Search;
use errors::*;
use utils;
use theme::searcher;
use utils;
/// Creates all files required for search.
pub fn create_files(search_config: &Search, destination: &Path, book: &Book) -> Result<()> {
@ -32,7 +32,11 @@ pub fn create_files(search_config: &Search, destination: &Path, book: &Book) ->
if search_config.copy_js {
utils::fs::write_file(destination, "searchindex.json", index.as_bytes())?;
utils::fs::write_file(destination, "searchindex.js", format!("window.search = {};", index).as_bytes())?;
utils::fs::write_file(
destination,
"searchindex.js",
format!("window.search = {};", index).as_bytes(),
)?;
utils::fs::write_file(destination, "searcher.js", searcher::JS)?;
utils::fs::write_file(destination, "mark.min.js", searcher::MARK_JS)?;
utils::fs::write_file(destination, "elasticlunr.min.js", searcher::ELASTICLUNR_JS)?;
@ -45,8 +49,8 @@ pub fn create_files(search_config: &Search, destination: &Path, book: &Book) ->
/// Uses the given arguments to construct a search document, then inserts it to the given index.
fn add_doc(
index: &mut Index,
doc_urls: &mut Vec<String>,
anchor_base: &str,
doc_urls: &mut Vec<String>,
anchor_base: &str,
section_id: &Option<String>,
items: &[&str],
) {
@ -166,8 +170,8 @@ fn render_item(
}
fn write_to_json(index: Index, search_config: &Search, doc_urls: Vec<String>) -> Result<String> {
use std::collections::BTreeMap;
use self::elasticlunr::config::{SearchBool, SearchOptions, SearchOptionsField};
use std::collections::BTreeMap;
#[derive(Serialize)]
struct ResultsOptions {

View File

@ -15,16 +15,16 @@ pub use self::html_handlebars::HtmlHandlebars;
mod html_handlebars;
use serde_json;
use shlex::Shlex;
use std::fs;
use std::io::{self, Read};
use std::path::PathBuf;
use std::process::{Command, Stdio};
use serde_json;
use shlex::Shlex;
use errors::*;
use config::Config;
use book::Book;
use config::Config;
use errors::*;
const MDBOOK_VERSION: &str = env!("CARGO_PKG_VERSION");
@ -162,17 +162,21 @@ impl Renderer for CmdRenderer {
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.current_dir(&ctx.destination)
.spawn() {
Ok(c) => c,
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
warn!("The command wasn't found, is the \"{}\" backend installed?", self.name);
warn!("\tCommand: {}", self.cmd);
return Ok(());
}
Err(e) => {
return Err(e).chain_err(|| "Unable to start the backend")?;
}
};
.spawn()
{
Ok(c) => c,
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
warn!(
"The command wasn't found, is the \"{}\" backend installed?",
self.name
);
warn!("\tCommand: {}", self.cmd);
return Ok(());
}
Err(e) => {
return Err(e).chain_err(|| "Unable to start the backend")?;
}
};
{
let mut stdin = child.stdin.take().expect("Child has stdin");

View File

@ -5,9 +5,9 @@ pub mod playpen_editor;
#[cfg(feature = "search")]
pub mod searcher;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use errors::*;
@ -34,7 +34,6 @@ pub static FONT_AWESOME_WOFF2: &'static [u8] =
include_bytes!("FontAwesome/fonts/fontawesome-webfont.woff2");
pub static FONT_AWESOME_OTF: &'static [u8] = include_bytes!("FontAwesome/fonts/FontAwesome.otf");
/// The `Theme` struct should be used instead of the static variables because
/// the `new()` method will look if the user has a theme directory in their
/// source folder and use the users theme instead of the default.
@ -78,8 +77,14 @@ impl Theme {
(theme_dir.join("highlight.js"), &mut theme.highlight_js),
(theme_dir.join("clipboard.min.js"), &mut theme.clipboard_js),
(theme_dir.join("highlight.css"), &mut theme.highlight_css),
(theme_dir.join("tomorrow-night.css"), &mut theme.tomorrow_night_css),
(theme_dir.join("ayu-highlight.css"), &mut theme.ayu_highlight_css),
(
theme_dir.join("tomorrow-night.css"),
&mut theme.tomorrow_night_css,
),
(
theme_dir.join("ayu-highlight.css"),
&mut theme.ayu_highlight_css,
),
];
for (filename, dest) in files {
@ -130,12 +135,11 @@ fn load_file_contents<P: AsRef<Path>>(filename: P, dest: &mut Vec<u8>) -> Result
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::Builder as TempFileBuilder;
use std::path::PathBuf;
use tempfile::Builder as TempFileBuilder;
#[test]
fn theme_uses_defaults_with_nonexistent_src_dir() {

View File

@ -25,16 +25,10 @@ pub fn normalize_path(path: &str) -> String {
}
/// Write the given data to a file, creating it first if necessary
pub fn write_file<P: AsRef<Path>>(
build_dir: &Path,
filename: P,
content: &[u8],
) -> Result<()> {
pub fn write_file<P: AsRef<Path>>(build_dir: &Path, filename: P, content: &[u8]) -> Result<()> {
let path = build_dir.join(filename);
create_file(&path)?
.write_all(content)
.map_err(|e| e.into())
create_file(&path)?.write_all(content).map_err(|e| e.into())
}
/// Takes a path and returns a path containing just enough `../` to point to

View File

@ -5,12 +5,13 @@ mod string;
use errors::Error;
use regex::Regex;
use pulldown_cmark::{html, Event, Options, Parser, Tag, OPTION_ENABLE_FOOTNOTES,
OPTION_ENABLE_TABLES};
use pulldown_cmark::{
html, Event, Options, Parser, Tag, OPTION_ENABLE_FOOTNOTES, OPTION_ENABLE_TABLES,
};
use std::borrow::Cow;
pub use self::string::{RangeArgument, take_lines};
pub use self::string::{take_lines, RangeArgument};
/// Replaces multiple consecutive whitespace characters with a single space character.
pub fn collapse_whitespace<'a>(text: &'a str) -> Cow<'a, str> {
@ -35,7 +36,10 @@ pub fn normalize_id(content: &str) -> String {
})
.collect::<String>();
// Ensure that the first character is [A-Za-z]
if ret.chars().next().map_or(false, |c| !c.is_ascii_alphabetic()) {
if ret.chars()
.next()
.map_or(false, |c| !c.is_ascii_alphabetic())
{
ret.insert(0, 'a');
}
ret
@ -47,17 +51,19 @@ pub fn id_from_content(content: &str) -> String {
let mut content = content.to_string();
// Skip any tags or html-encoded stuff
const REPL_SUB: &[&str] = &["<em>",
"</em>",
"<code>",
"</code>",
"<strong>",
"</strong>",
"&lt;",
"&gt;",
"&amp;",
"&#39;",
"&quot;"];
const REPL_SUB: &[&str] = &[
"<em>",
"</em>",
"<code>",
"</code>",
"<strong>",
"</strong>",
"&lt;",
"&gt;",
"&amp;",
"&#39;",
"&quot;",
];
for sub in REPL_SUB {
content = content.replace(sub, "");
}
@ -69,7 +75,6 @@ pub fn id_from_content(content: &str) -> String {
}
fn adjust_links(event: Event) -> Event {
lazy_static! {
static ref HTTP_LINK: Regex = Regex::new("^https?://").unwrap();
static ref MD_LINK: Regex = Regex::new("(?P<link>.*).md(?P<anchor>#.*)?").unwrap();
@ -79,22 +84,20 @@ fn adjust_links(event: Event) -> Event {
Event::Start(Tag::Link(dest, title)) => {
if !HTTP_LINK.is_match(&dest) {
if let Some(caps) = MD_LINK.captures(&dest) {
let mut html_link = [&caps["link"], ".html"].concat();
if let Some(anchor) = caps.name("anchor") {
html_link.push_str(anchor.as_str());
}
return Event::Start(Tag::Link(Cow::from(html_link), title))
return Event::Start(Tag::Link(Cow::from(html_link), title));
}
}
Event::Start(Tag::Link(dest, title))
},
_ => event
}
_ => event,
}
}
/// Wrapper around the pulldown-cmark parser for rendering markdown to HTML.
@ -108,8 +111,8 @@ pub fn render_markdown(text: &str, curly_quotes: bool) -> String {
let p = Parser::new_ext(text, opts);
let mut converter = EventQuoteConverter::new(curly_quotes);
let events = p.map(clean_codeblock_headers)
.map(adjust_links)
.map(|event| converter.convert(event));
.map(adjust_links)
.map(|event| converter.convert(event));
html::push_html(&mut s, events);
s
@ -161,36 +164,36 @@ fn clean_codeblock_headers(event: Event) -> Event {
}
}
fn convert_quotes_to_curly(original_text: &str) -> String {
// We'll consider the start to be "whitespace".
let mut preceded_by_whitespace = true;
original_text.chars()
.map(|original_char| {
let converted_char = match original_char {
'\'' => {
if preceded_by_whitespace {
'‘'
} else {
'’'
original_text
.chars()
.map(|original_char| {
let converted_char = match original_char {
'\'' => {
if preceded_by_whitespace {
'‘'
} else {
'’'
}
}
}
'"' => {
if preceded_by_whitespace {
'“'
} else {
'”'
'"' => {
if preceded_by_whitespace {
'“'
} else {
'”'
}
}
}
_ => original_char,
};
_ => original_char,
};
preceded_by_whitespace = original_char.is_whitespace();
preceded_by_whitespace = original_char.is_whitespace();
converted_char
})
.collect()
converted_char
})
.collect()
}
/// Prints a "backtrace" of some `Error`.
@ -209,13 +212,22 @@ mod tests {
#[test]
fn preserves_external_links() {
assert_eq!(render_markdown("[example](https://www.rust-lang.org/)", false), "<p><a href=\"https://www.rust-lang.org/\">example</a></p>\n");
assert_eq!(
render_markdown("[example](https://www.rust-lang.org/)", false),
"<p><a href=\"https://www.rust-lang.org/\">example</a></p>\n"
);
}
#[test]
fn it_can_adjust_markdown_links() {
assert_eq!(render_markdown("[example](example.md)", false), "<p><a href=\"example.html\">example</a></p>\n");
assert_eq!(render_markdown("[example_anchor](example.md#anchor)", false), "<p><a href=\"example.html#anchor\">example_anchor</a></p>\n");
assert_eq!(
render_markdown("[example](example.md)", false),
"<p><a href=\"example.html\">example</a></p>\n"
);
assert_eq!(
render_markdown("[example_anchor](example.md#anchor)", false),
"<p><a href=\"example.html#anchor\">example_anchor</a></p>\n"
);
}
#[test]
@ -316,18 +328,26 @@ more text with spaces
#[test]
fn it_generates_anchors() {
assert_eq!(id_from_content("## `--passes`: add more rustdoc passes"),
"a--passes-add-more-rustdoc-passes");
assert_eq!(id_from_content("## Method-call expressions"),
"method-call-expressions");
assert_eq!(
id_from_content("## `--passes`: add more rustdoc passes"),
"a--passes-add-more-rustdoc-passes"
);
assert_eq!(
id_from_content("## Method-call expressions"),
"method-call-expressions"
);
}
#[test]
fn it_normalizes_ids() {
assert_eq!(normalize_id("`--passes`: add more rustdoc passes"),
"a--passes-add-more-rustdoc-passes");
assert_eq!(normalize_id("Method-call 🐙 expressions \u{1f47c}"),
"method-call--expressions-");
assert_eq!(
normalize_id("`--passes`: add more rustdoc passes"),
"a--passes-add-more-rustdoc-passes"
);
assert_eq!(
normalize_id("Method-call 🐙 expressions \u{1f47c}"),
"method-call--expressions-"
);
assert_eq!(normalize_id("_-_12345"), "a_-_12345");
assert_eq!(normalize_id("12345"), "a12345");
assert_eq!(normalize_id(""), "");
@ -339,14 +359,18 @@ more text with spaces
#[test]
fn it_converts_single_quotes() {
assert_eq!(convert_quotes_to_curly("'one', 'two'"),
"one, two");
assert_eq!(
convert_quotes_to_curly("'one', 'two'"),
"one, two"
);
}
#[test]
fn it_converts_double_quotes() {
assert_eq!(convert_quotes_to_curly(r#""one", "two""#),
"“one”, “two”");
assert_eq!(
convert_quotes_to_curly(r#""one", "two""#),
"“one”, “two”"
);
}
#[test]

View File

@ -1,5 +1,5 @@
use std::ops::{Range, RangeFrom, RangeFull, RangeTo};
use itertools::Itertools;
use std::ops::{Range, RangeFrom, RangeFull, RangeTo};
// This trait is already contained in the standard lib, however it is unstable.
// TODO: Remove when the `collections_range` feature stabilises

View File

@ -3,11 +3,11 @@
extern crate mdbook;
extern crate tempfile;
#[cfg(not(windows))]
use std::path::Path;
use tempfile::{TempDir, Builder as TempFileBuilder};
use mdbook::config::Config;
use mdbook::MDBook;
#[cfg(not(windows))]
use std::path::Path;
use tempfile::{Builder as TempFileBuilder, TempDir};
#[test]
fn passing_alternate_backend() {
@ -52,8 +52,8 @@ fn tee_command<P: AsRef<Path>>(out_file: P) -> String {
#[test]
#[cfg(not(windows))]
fn backends_receive_render_context_via_stdin() {
use std::fs::File;
use mdbook::renderer::RenderContext;
use std::fs::File;
let temp = TempFileBuilder::new().prefix("output").tempdir().unwrap();
let out_file = temp.path().join("out.txt");

View File

@ -7,19 +7,18 @@ extern crate mdbook;
extern crate tempfile;
extern crate walkdir;
use std::path::Path;
use std::fs::{self, File};
use std::io::{Read, Write};
use mdbook::errors::*;
use mdbook::utils::fs::file_to_string;
use std::fs::{self, File};
use std::io::{Read, Write};
use std::path::Path;
// The funny `self::` here is because we've got an `extern crate ...` and are
// in a submodule
use self::tempfile::{TempDir, Builder as TempFileBuilder};
use self::mdbook::MDBook;
use self::tempfile::{Builder as TempFileBuilder, TempDir};
use self::walkdir::WalkDir;
/// Create a dummy book in a temporary directory, using the contents of
/// `SUMMARY_MD` as a guide.
///
@ -47,13 +46,16 @@ impl DummyBook {
/// Write a book to a temporary directory using the provided settings.
pub fn build(&self) -> Result<TempDir> {
let temp = TempFileBuilder::new().prefix("dummy_book").tempdir().chain_err(|| "Unable to create temp directory")?;
let temp = TempFileBuilder::new()
.prefix("dummy_book")
.tempdir()
.chain_err(|| "Unable to create temp directory")?;
let dummy_book_root = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/dummy_book");
recursive_copy(&dummy_book_root, temp.path()).chain_err(|| {
"Couldn't copy files into a \
temporary directory"
})?;
"Couldn't copy files into a \
temporary directory"
})?;
let sub_pattern = if self.passing_test { "true" } else { "false" };
let file_containing_test = temp.path().join("src/first/nested.md");
@ -77,11 +79,13 @@ pub fn assert_contains_strings<P: AsRef<Path>>(filename: P, strings: &[&str]) {
let content = file_to_string(filename).expect("Couldn't read the file's contents");
for s in strings {
assert!(content.contains(s),
"Searching for {:?} in {}\n\n{}",
s,
filename.display(),
content);
assert!(
content.contains(s),
"Searching for {:?} in {}\n\n{}",
s,
filename.display(),
content
);
}
}
@ -90,15 +94,16 @@ pub fn assert_doesnt_contain_strings<P: AsRef<Path>>(filename: P, strings: &[&st
let content = file_to_string(filename).expect("Couldn't read the file's contents");
for s in strings {
assert!(!content.contains(s),
"Found {:?} in {}\n\n{}",
s,
filename.display(),
content);
assert!(
!content.contains(s),
"Found {:?} in {}\n\n{}",
s,
filename.display(),
content
);
}
}
/// Recursively copy an entire directory tree to somewhere else (a la `cp -r`).
fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()> {
let from = from.as_ref();
@ -108,9 +113,9 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()>
let entry = entry.chain_err(|| "Unable to inspect directory entry")?;
let original_location = entry.path();
let relative = original_location.strip_prefix(&from)
.expect("`original_location` is inside the `from` \
directory");
let relative = original_location
.strip_prefix(&from)
.expect("`original_location` is inside the `from` directory");
let new_location = to.join(relative);
if original_location.is_file() {
@ -118,9 +123,8 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()>
fs::create_dir_all(parent).chain_err(|| "Couldn't create directory")?;
}
fs::copy(&original_location, &new_location).chain_err(|| {
"Unable to copy file contents"
})?;
fs::copy(&original_location, &new_location)
.chain_err(|| "Unable to copy file contents")?;
}
}
@ -129,7 +133,7 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()>
pub fn new_copy_of_example_book() -> Result<TempDir> {
let temp = TempFileBuilder::new().prefix("book-example").tempdir()?;
let book_example = Path::new(env!("CARGO_MANIFEST_DIR")).join("book-example");
recursive_copy(book_example, temp.path())?;

View File

@ -1,13 +1,12 @@
extern crate mdbook;
extern crate tempfile;
use std::path::PathBuf;
use std::fs;
use mdbook::MDBook;
use mdbook::config::Config;
use mdbook::MDBook;
use std::fs;
use std::path::PathBuf;
use tempfile::Builder as TempFileBuilder;
/// Run `mdbook init` in an empty directory and make sure the default files
/// are created.
#[test]

View File

@ -5,10 +5,10 @@ extern crate env_logger;
extern crate error_chain;
extern crate mdbook;
use mdbook::book;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use mdbook::book;
macro_rules! summary_md_test {
($name:ident, $filename:expr) => {

View File

@ -9,18 +9,18 @@ mod dummy_book;
use dummy_book::{assert_contains_strings, assert_doesnt_contain_strings, DummyBook};
use mdbook::config::Config;
use mdbook::errors::*;
use mdbook::utils::fs::{file_to_string, write_file};
use mdbook::MDBook;
use select::document::Document;
use select::predicate::{Class, Name, Predicate};
use std::ffi::OsStr;
use std::fs;
use std::io::Write;
use std::path::Path;
use std::ffi::OsStr;
use walkdir::{DirEntry, WalkDir};
use select::document::Document;
use select::predicate::{Class, Name, Predicate};
use tempfile::Builder as TempFileBuilder;
use mdbook::errors::*;
use mdbook::utils::fs::{file_to_string, write_file};
use mdbook::config::Config;
use mdbook::MDBook;
use walkdir::{DirEntry, WalkDir};
const BOOK_ROOT: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/dummy_book");
const TOC_TOP_LEVEL: &[&'static str] = &[
@ -29,7 +29,8 @@ const TOC_TOP_LEVEL: &[&'static str] = &[
"Conclusion",
"Introduction",
];
const TOC_SECOND_LEVEL: &[&'static str] = &["1.1. Nested Chapter", "1.2. Includes", "1.3. Recursive"];
const TOC_SECOND_LEVEL: &[&'static str] =
&["1.1. Nested Chapter", "1.2. Includes", "1.3. Recursive"];
/// Make sure you can load the dummy book and build it without panicking.
#[test]
@ -98,16 +99,12 @@ fn check_correct_cross_links_in_nested_dir() {
assert_contains_strings(
first.join("index.html"),
&[
r##"href="#some-section" id="some-section""##,
],
&[r##"href="#some-section" id="some-section""##],
);
assert_contains_strings(
first.join("nested.html"),
&[
r##"href="#some-section" id="some-section""##,
],
&[r##"href="#some-section" id="some-section""##],
);
}
@ -357,14 +354,12 @@ fn book_with_a_reserved_filename_does_not_build() {
fn by_default_mdbook_use_index_preprocessor_to_convert_readme_to_index() {
let temp = DummyBook::new().build().unwrap();
let mut cfg = Config::default();
cfg.set("book.src", "src2").expect("Couldn't set config.book.src to \"src2\".");
cfg.set("book.src", "src2")
.expect("Couldn't set config.book.src to \"src2\".");
let md = MDBook::load_with_config(temp.path(), cfg).unwrap();
md.build().unwrap();
let first_index = temp.path()
.join("book")
.join("first")
.join("index.html");
let first_index = temp.path().join("book").join("first").join("index.html");
let expected_strings = vec![
r#"href="../first/index.html""#,
r#"href="../second/index.html""#,
@ -373,13 +368,8 @@ fn by_default_mdbook_use_index_preprocessor_to_convert_readme_to_index() {
assert_contains_strings(&first_index, &expected_strings);
assert_doesnt_contain_strings(&first_index, &vec!["README.html"]);
let second_index = temp.path()
.join("book")
.join("second")
.join("index.html");
let unexpected_strings = vec![
"Second README",
];
let second_index = temp.path().join("book").join("second").join("index.html");
let unexpected_strings = vec!["Second README"];
assert_doesnt_contain_strings(&second_index, &unexpected_strings);
}
@ -404,11 +394,11 @@ fn theme_dir_overrides_work_correctly() {
#[cfg(feature = "search")]
mod search {
extern crate serde_json;
use std::fs::File;
use std::path::Path;
use dummy_book::DummyBook;
use mdbook::utils::fs::file_to_string;
use mdbook::MDBook;
use dummy_book::DummyBook;
use std::fs::File;
use std::path::Path;
fn read_book_index(root: &Path) -> serde_json::Value {
let index = root.join("book/searchindex.js");
@ -427,12 +417,8 @@ mod search {
let index = read_book_index(temp.path());
let doc_urls = index["doc_urls"].as_array().unwrap();
let get_doc_ref = |url: &str| -> String {
doc_urls.iter()
.position(|s| s == url)
.unwrap()
.to_string()
};
let get_doc_ref =
|url: &str| -> String { doc_urls.iter().position(|s| s == url).unwrap().to_string() };
let first_chapter = get_doc_ref("first/index.html#first-chapter");
let introduction = get_doc_ref("intro.html#introduction");
@ -453,14 +439,8 @@ mod search {
docs[&summary]["body"],
"Introduction First Chapter Nested Chapter Includes Recursive Second Chapter Conclusion"
);
assert_eq!(
docs[&summary]["breadcrumbs"],
"First Chapter » Summary"
);
assert_eq!(
docs[&conclusion]["body"],
"I put &lt;HTML&gt; in here!"
);
assert_eq!(docs[&summary]["breadcrumbs"], "First Chapter » Summary");
assert_eq!(docs[&conclusion]["body"], "I put &lt;HTML&gt; in here!");
}
// Setting this to `true` may cause issues with `cargo watch`,

View File

@ -4,11 +4,11 @@ mod dummy_book;
use dummy_book::DummyBook;
use mdbook::MDBook;
use mdbook::preprocess::{Preprocessor, PreprocessorContext};
use mdbook::book::Book;
use mdbook::config::Config;
use mdbook::errors::*;
use mdbook::preprocess::{Preprocessor, PreprocessorContext};
use mdbook::MDBook;
use std::sync::{Arc, Mutex};
@ -30,7 +30,6 @@ fn mdbook_detects_book_with_failing_tests() {
#[test]
fn mdbook_runs_preprocessors() {
let has_run: Arc<Mutex<bool>> = Arc::new(Mutex::new(false));
struct DummyPreprocessor(Arc<Mutex<bool>>);