Merge pull request #690 from mattico/rustfmt

Remove rustfmt.toml
commit 0ac36f2183
Matt Ickstadt 2018-07-23 12:47:33 -05:00, committed by GitHub
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
35 changed files with 428 additions and 419 deletions

@@ -24,7 +24,6 @@ mod execs {
     }
 }
 
-
 error_chain!{
     foreign_links {
         Io(std::io::Error);
@@ -32,23 +31,25 @@ error_chain!{
 }
 
 fn program_exists(program: &str) -> Result<()> {
-    execs::cmd(program).arg("-v")
-        .output()
-        .chain_err(|| format!("Please install '{}'!", program))?;
+    execs::cmd(program)
+        .arg("-v")
+        .output()
+        .chain_err(|| format!("Please install '{}'!", program))?;
     Ok(())
 }
 
 fn npm_package_exists(package: &str) -> Result<()> {
-    let status = execs::cmd("npm").args(&["list", "-g"])
-        .arg(package)
-        .output();
+    let status = execs::cmd("npm")
+        .args(&["list", "-g"])
+        .arg(package)
+        .output();
 
     match status {
         Ok(ref out) if out.status.success() => Ok(()),
-        _ => {
-            bail!("Missing npm package '{0}' install with: 'npm -g install {0}'",
-                  package)
-        }
+        _ => bail!(
+            "Missing npm package '{0}' install with: 'npm -g install {0}'",
+            package
+        ),
     }
 }
@@ -81,13 +82,14 @@ fn run() -> Result<()> {
     let theme_dir = Path::new(&manifest_dir).join("src/theme/");
     let stylus_dir = theme_dir.join("stylus/book.styl");
 
-    if !execs::cmd("stylus").arg(stylus_dir)
-        .arg("--out")
-        .arg(theme_dir)
-        .arg("--use")
-        .arg("nib")
-        .status()?
-        .success()
+    if !execs::cmd("stylus")
+        .arg(stylus_dir)
+        .arg("--out")
+        .arg(theme_dir)
+        .arg("--use")
+        .arg("nib")
+        .status()?
+        .success()
     {
         bail!("Stylus encountered an error");
     }

@@ -3,15 +3,15 @@ extern crate mdbook;
 extern crate pulldown_cmark;
 extern crate pulldown_cmark_to_cmark;
 
-use mdbook::errors::{Error, Result};
-use mdbook::MDBook;
 use mdbook::book::{Book, BookItem, Chapter};
+use mdbook::errors::{Error, Result};
 use mdbook::preprocess::{Preprocessor, PreprocessorContext};
+use mdbook::MDBook;
 use pulldown_cmark::{Event, Parser, Tag};
 use pulldown_cmark_to_cmark::fmt::cmark;
-use std::ffi::OsString;
 use std::env::{args, args_os};
+use std::ffi::OsString;
 use std::process;
 
 struct Deemphasize;

@@ -1 +0,0 @@
-format_strings = true

@@ -1,7 +1,7 @@
-use std::path::PathBuf;
 use clap::{App, ArgMatches, SubCommand};
-use mdbook::MDBook;
 use mdbook::errors::Result;
+use mdbook::MDBook;
+use std::path::PathBuf;
 use {get_book_dir, open};
 
 // Create clap subcommand arguments

@@ -1,9 +1,9 @@
+use clap::{App, ArgMatches, SubCommand};
+use get_book_dir;
+use mdbook::errors::*;
+use mdbook::MDBook;
 use std::fs;
 use std::path::PathBuf;
-use clap::{App, ArgMatches, SubCommand};
-use mdbook::MDBook;
-use mdbook::errors::*;
-use get_book_dir;
 
 // Create clap subcommand arguments
 pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {

@@ -1,11 +1,11 @@
+use clap::{App, ArgMatches, SubCommand};
+use get_book_dir;
+use mdbook::config;
+use mdbook::errors::Result;
+use mdbook::MDBook;
 use std::io;
 use std::io::Write;
 use std::process::Command;
-use clap::{App, ArgMatches, SubCommand};
-use mdbook::MDBook;
-use mdbook::errors::Result;
-use mdbook::config;
-use get_book_dir;
 
 // Create clap subcommand arguments
 pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {

@@ -8,22 +8,22 @@ extern crate log;
 extern crate mdbook;
 extern crate open;
 
+use chrono::Local;
+use clap::{App, AppSettings, ArgMatches};
+use env_logger::Builder;
+use log::LevelFilter;
+use mdbook::utils;
 use std::env;
 use std::ffi::OsStr;
-use std::path::{Path, PathBuf};
 use std::io::Write;
-use clap::{App, AppSettings, ArgMatches};
-use chrono::Local;
-use log::LevelFilter;
-use env_logger::Builder;
-use mdbook::utils;
+use std::path::{Path, PathBuf};
 
 pub mod build;
 pub mod clean;
 pub mod init;
+pub mod test;
 #[cfg(feature = "serve")]
 pub mod serve;
-pub mod test;
 #[cfg(feature = "watch")]
 pub mod watch;

@@ -2,16 +2,17 @@ extern crate iron;
 extern crate staticfile;
 extern crate ws;
 
-use std;
-use self::iron::{status, AfterMiddleware, Chain, Iron, IronError, IronResult, Request, Response,
-                 Set};
+use self::iron::{
+    status, AfterMiddleware, Chain, Iron, IronError, IronResult, Request, Response, Set,
+};
 use clap::{App, ArgMatches, SubCommand};
-use mdbook::MDBook;
-use mdbook::utils;
 use mdbook::errors::*;
-use {get_book_dir, open};
+use mdbook::utils;
+use mdbook::MDBook;
+use std;
 #[cfg(feature = "watch")]
 use watch;
+use {get_book_dir, open};
 
 struct ErrorRecover;

@@ -1,22 +1,20 @@
 use clap::{App, ArgMatches, SubCommand};
-use mdbook::MDBook;
-use mdbook::errors::Result;
 use get_book_dir;
+use mdbook::errors::Result;
+use mdbook::MDBook;
 
 // Create clap subcommand arguments
 pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
     SubCommand::with_name("test")
         .about("Test that code samples compile")
-        .arg_from_usage(
-            "-L, --library-path [DIR]... 'directory to add to crate search path'",
-        )
+        .arg_from_usage("-L, --library-path [DIR]... 'directory to add to crate search path'")
 }
 
 // test command implementation
 pub fn execute(args: &ArgMatches) -> Result<()> {
     let library_paths: Vec<&str> = args.values_of("library-path")
         .map(|v| v.collect())
         .unwrap_or_default();
     let book_dir = get_book_dir(args);
     let mut book = MDBook::load(&book_dir)?;

@@ -1,13 +1,13 @@
 extern crate notify;
 
-use std::path::Path;
 use self::notify::Watcher;
-use std::time::Duration;
-use std::sync::mpsc::channel;
 use clap::{App, ArgMatches, SubCommand};
-use mdbook::MDBook;
-use mdbook::utils;
 use mdbook::errors::Result;
+use mdbook::utils;
+use mdbook::MDBook;
+use std::path::Path;
+use std::sync::mpsc::channel;
+use std::time::Duration;
 use {get_book_dir, open};
 
 // Create clap subcommand arguments
@@ -48,8 +48,8 @@ pub fn trigger_on_change<F>(book: &MDBook, closure: F)
 where
     F: Fn(&Path, &Path),
 {
-    use self::notify::RecursiveMode::*;
     use self::notify::DebouncedEvent::*;
+    use self::notify::RecursiveMode::*;
 
     // Create a channel to receive the events.
     let (tx, rx) = channel();

@@ -1,8 +1,8 @@
-use std::fmt::{self, Display, Formatter};
-use std::path::{Path, PathBuf};
 use std::collections::VecDeque;
+use std::fmt::{self, Display, Formatter};
 use std::fs::{self, File};
 use std::io::{Read, Write};
+use std::path::{Path, PathBuf};
 
 use super::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem};
 use config::BuildConfig;
@@ -297,8 +297,8 @@ impl Display for Chapter {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use tempfile::{TempDir, Builder as TempFileBuilder};
     use std::io::Write;
+    use tempfile::{Builder as TempFileBuilder, TempDir};
 
     const DUMMY_SRC: &'static str = "
 # Dummy Chapter
@@ -404,14 +404,12 @@ And here is some \
             ..Default::default()
         };
         let should_be = Book {
-            sections: vec![
-                BookItem::Chapter(Chapter {
-                    name: String::from("Chapter 1"),
-                    content: String::from(DUMMY_SRC),
-                    path: PathBuf::from("chapter_1.md"),
-                    ..Default::default()
-                }),
-            ],
+            sections: vec![BookItem::Chapter(Chapter {
+                name: String::from("Chapter 1"),
+                content: String::from(DUMMY_SRC),
+                path: PathBuf::from("chapter_1.md"),
+                ..Default::default()
+            })],
             ..Default::default()
         };
@@ -535,13 +533,11 @@ And here is some \
     fn cant_load_chapters_with_an_empty_path() {
         let (_, temp) = dummy_link();
         let summary = Summary {
-            numbered_chapters: vec![
-                SummaryItem::Link(Link {
-                    name: String::from("Empty"),
-                    location: PathBuf::from(""),
-                    ..Default::default()
-                }),
-            ],
+            numbered_chapters: vec![SummaryItem::Link(Link {
+                name: String::from("Empty"),
+                location: PathBuf::from(""),
+                ..Default::default()
+            })],
             ..Default::default()
         };
@@ -556,13 +552,11 @@ And here is some \
         fs::create_dir(&dir).unwrap();
 
         let summary = Summary {
-            numbered_chapters: vec![
-                SummaryItem::Link(Link {
-                    name: String::from("nested"),
-                    location: dir,
-                    ..Default::default()
-                }),
-            ],
+            numbered_chapters: vec![SummaryItem::Link(Link {
+                name: String::from("nested"),
+                location: dir,
+                ..Default::default()
+            })],
             ..Default::default()
         };

@@ -1,12 +1,12 @@
 use std::fs::{self, File};
-use std::path::PathBuf;
 use std::io::Write;
+use std::path::PathBuf;
 use toml;
 
-use config::Config;
 use super::MDBook;
-use theme;
+use config::Config;
 use errors::*;
+use theme;
 
 /// A helper for setting up a new book and its directory structure.
 #[derive(Debug, Clone, PartialEq)]

@@ -5,29 +5,24 @@
 //!
 //! [1]: ../index.html
 
-mod summary;
 mod book;
 mod init;
+mod summary;
 
 pub use self::book::{load_book, Book, BookItem, BookItems, Chapter};
-pub use self::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem};
 pub use self::init::BookBuilder;
+pub use self::summary::{parse_summary, Link, SectionNumber, Summary, SummaryItem};
 
-use std::path::PathBuf;
 use std::io::Write;
+use std::path::PathBuf;
 use std::process::Command;
 use tempfile::Builder as TempFileBuilder;
 use toml::Value;
 
-use utils;
-use renderer::{CmdRenderer, HtmlHandlebars, RenderContext, Renderer};
-use preprocess::{
-    LinkPreprocessor,
-    IndexPreprocessor,
-    Preprocessor,
-    PreprocessorContext
-};
 use errors::*;
+use preprocess::{IndexPreprocessor, LinkPreprocessor, Preprocessor, PreprocessorContext};
+use renderer::{CmdRenderer, HtmlHandlebars, RenderContext, Renderer};
+use utils;
 use config::Config;

@@ -1,10 +1,10 @@
+use errors::*;
+use memchr::{self, Memchr};
+use pulldown_cmark::{self, Event, Tag};
 use std::fmt::{self, Display, Formatter};
 use std::iter::FromIterator;
 use std::ops::{Deref, DerefMut};
 use std::path::{Path, PathBuf};
-use memchr::{self, Memchr};
-use pulldown_cmark::{self, Event, Tag};
-use errors::*;
 
 /// Parse the text from a `SUMMARY.md` file into a sort of "recipe" to be
 /// used when loading a book from disk.
@@ -164,33 +164,34 @@ struct SummaryParser<'a> {
 /// use pattern matching and you won't get errors because `take_while()`
 /// moves `$stream` out of self.
 macro_rules! collect_events {
-    ($stream:expr, start $delimiter:pat) => {
+    ($stream:expr,start $delimiter:pat) => {
         collect_events!($stream, Event::Start($delimiter))
     };
-    ($stream:expr, end $delimiter:pat) => {
+    ($stream:expr,end $delimiter:pat) => {
         collect_events!($stream, Event::End($delimiter))
     };
-    ($stream:expr, $delimiter:pat) => {
-        {
-            let mut events = Vec::new();
+    ($stream:expr, $delimiter:pat) => {{
+        let mut events = Vec::new();
 
         loop {
             let event = $stream.next();
             trace!("Next event: {:?}", event);
 
             match event {
                 Some($delimiter) => break,
                 Some(other) => events.push(other),
                 None => {
-                    debug!("Reached end of stream without finding the closing pattern, {}", stringify!($delimiter));
+                    debug!(
+                        "Reached end of stream without finding the closing pattern, {}",
+                        stringify!($delimiter)
+                    );
                     break;
                 }
             }
         }
 
-            events
-        }
-    }
+        events
+    }};
 }
@@ -659,14 +660,12 @@ mod tests {
                 name: String::from("First"),
                 location: PathBuf::from("./first.md"),
                 number: Some(SectionNumber(vec![1])),
-                nested_items: vec![
-                    SummaryItem::Link(Link {
-                        name: String::from("Nested"),
-                        location: PathBuf::from("./nested.md"),
-                        number: Some(SectionNumber(vec![1, 1])),
-                        nested_items: Vec::new(),
-                    }),
-                ],
+                nested_items: vec![SummaryItem::Link(Link {
+                    name: String::from("Nested"),
+                    location: PathBuf::from("./nested.md"),
+                    number: Some(SectionNumber(vec![1, 1])),
+                    nested_items: Vec::new(),
+                })],
             }),
             SummaryItem::Link(Link {
                 name: String::from("Second"),

@@ -50,17 +50,17 @@
 #![deny(missing_docs)]
 
-use std::path::{Path, PathBuf};
-use std::fs::File;
-use std::io::Read;
-use std::env;
-use toml::{self, Value};
-use toml::value::Table;
-use toml_query::read::TomlValueReadExt;
-use toml_query::insert::TomlValueInsertExt;
-use toml_query::delete::TomlValueDeleteExt;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use serde_json;
+use std::env;
+use std::fs::File;
+use std::io::Read;
+use std::path::{Path, PathBuf};
+use toml::value::Table;
+use toml::{self, Value};
+use toml_query::delete::TomlValueDeleteExt;
+use toml_query::insert::TomlValueInsertExt;
+use toml_query::read::TomlValueReadExt;
 
 use errors::*;
@@ -217,9 +217,10 @@ impl Config {
         // figure out what try_into() deserializes to.
         macro_rules! get_and_insert {
             ($table:expr, $key:expr => $out:expr) => {
-                let got = $table.as_table_mut()
-                    .and_then(|t| t.remove($key))
-                    .and_then(|v| v.try_into().ok());
+                let got = $table
+                    .as_table_mut()
+                    .and_then(|t| t.remove($key))
+                    .and_then(|v| v.try_into().ok());
                 if let Some(value) = got {
                     $out = value;
                 }

@@ -107,17 +107,17 @@ extern crate toml_query;
 #[macro_use]
 extern crate pretty_assertions;
 
-pub mod preprocess;
 pub mod book;
 pub mod config;
+pub mod preprocess;
 pub mod renderer;
 pub mod theme;
 pub mod utils;
 
-pub use book::MDBook;
 pub use book::BookItem;
-pub use renderer::Renderer;
+pub use book::MDBook;
 pub use config::Config;
+pub use renderer::Renderer;
 
 /// The error types used through out this crate.
 pub mod errors {

@@ -1,5 +1,5 @@
-use std::path::Path;
 use regex::Regex;
+use std::path::Path;
 
 use errors::*;
@@ -27,8 +27,7 @@ impl Preprocessor for IndexPreprocessor {
         book.for_each_mut(|section: &mut BookItem| {
             if let BookItem::Chapter(ref mut ch) = *section {
                 if is_readme_file(&ch.path) {
-                    let index_md = source_dir
-                        .join(ch.path.with_file_name("index.md"));
+                    let index_md = source_dir.join(ch.path.with_file_name("index.md"));
                     if index_md.exists() {
                         warn_readme_name_conflict(&ch.path, &index_md);
                     }
@@ -45,8 +44,15 @@ impl Preprocessor for IndexPreprocessor {
 fn warn_readme_name_conflict<P: AsRef<Path>>(readme_path: P, index_path: P) {
     let file_name = readme_path.as_ref().file_name().unwrap_or_default();
     let parent_dir = index_path.as_ref().parent().unwrap_or(index_path.as_ref());
-    warn!("It seems that there are both {:?} and index.md under \"{}\".", file_name, parent_dir.display());
-    warn!("mdbook converts {:?} into index.html by default. It may cause", file_name);
+    warn!(
+        "It seems that there are both {:?} and index.md under \"{}\".",
+        file_name,
+        parent_dir.display()
+    );
+    warn!(
+        "mdbook converts {:?} into index.html by default. It may cause",
+        file_name
+    );
     warn!("unexpected behavior if putting both files under the same directory.");
     warn!("To solve the warning, try to rearrange the book structure or disable");
     warn!("\"index\" preprocessor to stop the conversion.");
@@ -60,7 +66,7 @@ fn is_readme_file<P: AsRef<Path>>(path: P) -> bool {
         path.as_ref()
             .file_stem()
             .and_then(|s| s.to_str())
-            .unwrap_or_default()
+            .unwrap_or_default(),
     )
 }

@@ -1,9 +1,9 @@
+use errors::*;
+use regex::{CaptureMatches, Captures, Regex};
 use std::ops::{Range, RangeFrom, RangeFull, RangeTo};
 use std::path::{Path, PathBuf};
-use regex::{CaptureMatches, Captures, Regex};
 use utils::fs::file_to_string;
 use utils::take_lines;
-use errors::*;
 
 use super::{Preprocessor, PreprocessorContext};
 use book::{Book, BookItem};
@@ -62,12 +62,18 @@ fn replace_all<P: AsRef<Path>>(s: &str, path: P, source: &P, depth: usize) -> String {
             Ok(new_content) => {
                 if depth < MAX_LINK_NESTED_DEPTH {
                     if let Some(rel_path) = playpen.link.relative_path(path) {
-                        replaced.push_str(&replace_all(&new_content, rel_path, &source.to_path_buf(), depth + 1));
+                        replaced.push_str(&replace_all(
+                            &new_content,
+                            rel_path,
+                            &source.to_path_buf(),
+                            depth + 1,
+                        ));
                     }
-                }
-                else {
-                    error!("Stack depth exceeded in {}. Check for cyclic includes",
-                           source.display());
+                } else {
+                    error!(
+                        "Stack depth exceeded in {}. Check for cyclic includes",
+                        source.display()
+                    );
                 }
                 previous_end_index = playpen.end_index;
             }
@@ -103,7 +109,7 @@ impl<'a> LinkType<'a> {
             LinkType::IncludeRangeFrom(p, _) => Some(return_relative_path(base, &p)),
             LinkType::IncludeRangeTo(p, _) => Some(return_relative_path(base, &p)),
             LinkType::IncludeRangeFull(p, _) => Some(return_relative_path(base, &p)),
-            LinkType::Playpen(p,_) => Some(return_relative_path(base, &p))
+            LinkType::Playpen(p, _) => Some(return_relative_path(base, &p)),
         }
     }
 }
@@ -241,15 +247,16 @@ fn find_links(contents: &str) -> LinkIter {
     // lazily compute following regex
    // r"\\\{\{#.*\}\}|\{\{#([a-zA-Z0-9]+)\s*([a-zA-Z0-9_.\-:/\\\s]+)\}\}")?;
    lazy_static! {
-        static ref RE: Regex = Regex::new(r"(?x) # insignificant whitespace mode
-                  \\\{\{\#.*\}\}               # match escaped link
-                  |                            # or
-                  \{\{\s*                      # link opening parens and whitespace
-                  \#([a-zA-Z0-9]+)             # link type
-                  \s+                          # separating whitespace
-                  ([a-zA-Z0-9\s_.\-:/\\]+)     # link target path and space separated properties
-                  \s*\}\}                      # whitespace and link closing parens
-                  ").unwrap();
+        static ref RE: Regex = Regex::new(
+            r"(?x) # insignificant whitespace mode
+              \\\{\{\#.*\}\}               # match escaped link
+              |                            # or
+              \{\{\s*                      # link opening parens and whitespace
+              \#([a-zA-Z0-9]+)             # link type
+              \s+                          # separating whitespace
+              ([a-zA-Z0-9\s_.\-:/\\]+)     # link target path and space separated properties
+              \s*\}\}                      # whitespace and link closing parens"
+        ).unwrap();
    }
    LinkIter(RE.captures_iter(contents))
 }
@@ -319,14 +326,12 @@ mod tests {
         println!("\nOUTPUT: {:?}\n", res);
 
         assert_eq!(
             res,
-            vec![
-                Link {
-                    start_index: 22,
-                    end_index: 48,
-                    link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..20),
-                    link_text: "{{#include file.rs:10:20}}",
-                },
-            ]
+            vec![Link {
+                start_index: 22,
+                end_index: 48,
+                link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..20),
+                link_text: "{{#include file.rs:10:20}}",
+            }]
         );
     }
@@ -337,14 +342,12 @@ mod tests {
         println!("\nOUTPUT: {:?}\n", res);
 
         assert_eq!(
             res,
-            vec![
-                Link {
-                    start_index: 22,
-                    end_index: 45,
-                    link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..10),
-                    link_text: "{{#include file.rs:10}}",
-                },
-            ]
+            vec![Link {
+                start_index: 22,
+                end_index: 45,
+                link: LinkType::IncludeRange(PathBuf::from("file.rs"), 9..10),
+                link_text: "{{#include file.rs:10}}",
+            }]
        );
    }
@@ -355,14 +358,12 @@ mod tests {
         println!("\nOUTPUT: {:?}\n", res);
 
         assert_eq!(
             res,
-            vec![
-                Link {
-                    start_index: 22,
-                    end_index: 46,
-                    link: LinkType::IncludeRangeFrom(PathBuf::from("file.rs"), 9..),
-                    link_text: "{{#include file.rs:10:}}",
-                },
-            ]
+            vec![Link {
+                start_index: 22,
+                end_index: 46,
+                link: LinkType::IncludeRangeFrom(PathBuf::from("file.rs"), 9..),
+                link_text: "{{#include file.rs:10:}}",
+            }]
        );
    }
@@ -373,14 +374,12 @@ mod tests {
         println!("\nOUTPUT: {:?}\n", res);
 
         assert_eq!(
             res,
-            vec![
-                Link {
-                    start_index: 22,
-                    end_index: 46,
-                    link: LinkType::IncludeRangeTo(PathBuf::from("file.rs"), ..20),
-                    link_text: "{{#include file.rs::20}}",
-                },
-            ]
+            vec![Link {
+                start_index: 22,
+                end_index: 46,
+                link: LinkType::IncludeRangeTo(PathBuf::from("file.rs"), ..20),
+                link_text: "{{#include file.rs::20}}",
+            }]
        );
    }
@@ -391,14 +390,12 @@ mod tests {
         println!("\nOUTPUT: {:?}\n", res);
 
         assert_eq!(
             res,
-            vec![
-                Link {
-                    start_index: 22,
-                    end_index: 44,
-                    link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
-                    link_text: "{{#include file.rs::}}",
-                },
-            ]
+            vec![Link {
+                start_index: 22,
+                end_index: 44,
+                link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
+                link_text: "{{#include file.rs::}}",
+            }]
        );
    }
@@ -409,14 +406,12 @@ mod tests {
         println!("\nOUTPUT: {:?}\n", res);
 
         assert_eq!(
             res,
-            vec![
-                Link {
-                    start_index: 22,
-                    end_index: 42,
-                    link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
-                    link_text: "{{#include file.rs}}",
-                },
-            ]
+            vec![Link {
+                start_index: 22,
+                end_index: 42,
+                link: LinkType::IncludeRangeFull(PathBuf::from("file.rs"), ..),
+                link_text: "{{#include file.rs}}",
+            }]
        );
    }
@@ -429,14 +424,12 @@ mod tests {
         assert_eq!(
             res,
-            vec![
-                Link {
-                    start_index: 38,
-                    end_index: 68,
-                    link: LinkType::Escaped,
-                    link_text: "\\{{#playpen file.rs editable}}",
-                },
-            ]
+            vec![Link {
+                start_index: 38,
+                end_index: 68,
+                link: LinkType::Escaped,
+                link_text: "\\{{#playpen file.rs editable}}",
+            }]
        );
    }

@@ -1,10 +1,10 @@
 //! Book preprocessing.
 
-pub use self::links::LinkPreprocessor;
 pub use self::index::IndexPreprocessor;
+pub use self::links::LinkPreprocessor;
 
-mod links;
 mod index;
+mod links;
 
 use book::Book;
 use config::Config;

@@ -132,8 +132,11 @@ impl HtmlHandlebars {
     ) -> Result<()> {
         use utils::fs::write_file;
 
-        write_file(destination, ".nojekyll",
-            b"This file makes sure that Github Pages doesn't process mdBook's output.")?;
+        write_file(
+            destination,
+            ".nojekyll",
+            b"This file makes sure that Github Pages doesn't process mdBook's output.",
+        )?;
 
         write_file(destination, "book.js", &theme.js)?;
         write_file(destination, "book.css", &theme.css)?;
@@ -450,7 +453,10 @@ fn make_data(
     if cfg!(feature = "search") {
         let search = search.unwrap_or_default();
         data.insert("search_enabled".to_owned(), json!(search.enable));
-        data.insert("search_js".to_owned(), json!(search.enable && search.copy_js));
+        data.insert(
+            "search_js".to_owned(),
+            json!(search.enable && search.copy_js),
+        );
     } else if search.is_some() {
         warn!("mdBook compiled without search support, ignoring `output.html.search` table");
         warn!(
@@ -513,7 +519,7 @@ fn build_header_links(html: &str) -> String {
 fn wrap_header_with_link(
     level: usize,
     content: &str,
-    id_counter: &mut HashMap<String, usize>
+    id_counter: &mut HashMap<String, usize>,
 ) -> String {
     let raw_id = utils::id_from_content(content);
@@ -534,7 +540,6 @@ fn wrap_header_with_link(
     )
 }
 
-
 // The rust book uses annotations for rustdoc to test code snippets,
 // like the following:
 // ```rust,should_panic
@@ -574,7 +579,8 @@ fn add_playpen_pre(html: &str, playpen_config: &Playpen) -> String {
                 {
                     // wrap the contents in an external pre block
                     if playpen_config.editable && classes.contains("editable")
-                        || text.contains("fn main") || text.contains("quick_main!")
+                        || text.contains("fn main")
+                        || text.contains("quick_main!")
                     {
                         format!("<pre class=\"playpen\">{}</pre>", text)
                     } else {

@@ -1,2 +1,2 @@
-pub mod toc;
 pub mod navigation;
+pub mod toc;

@@ -1,8 +1,8 @@
-use std::path::Path;
 use std::collections::BTreeMap;
+use std::path::Path;
 
-use serde_json;
 use handlebars::{Context, Handlebars, Helper, RenderContext, RenderError, Renderable};
+use serde_json;
 
 use utils;
@@ -90,12 +90,14 @@ fn render(
     let mut context = BTreeMap::new();
 
     let base_path = rc.evaluate_absolute("path", false)?
         .as_str()
         .ok_or_else(|| RenderError::new("Type error for `path`, string expected"))?
         .replace("\"", "");
 
-    context.insert("path_to_root".to_owned(),
-                   json!(utils::fs::path_to_root(&base_path)));
+    context.insert(
+        "path_to_root".to_owned(),
+        json!(utils::fs::path_to_root(&base_path)),
+    );
 
     chapter
         .get("name")

@@ -1,11 +1,11 @@
-use std::path::Path;
 use std::collections::BTreeMap;
+use std::path::Path;
 
 use utils;
 
-use serde_json;
 use handlebars::{Handlebars, Helper, HelperDef, RenderContext, RenderError};
 use pulldown_cmark::{html, Event, Parser, Tag};
+use serde_json;
 
 // Handlebars helper to construct TOC
 #[derive(Clone, Copy)]
@@ -79,7 +79,8 @@ impl HelperDef for RenderToc {
                     .replace("\\", "/");
 
                 // Add link
-                rc.writer.write_all(&utils::fs::path_to_root(&current).as_bytes())?;
+                rc.writer
+                    .write_all(&utils::fs::path_to_root(&current).as_bytes())?;
                 rc.writer.write_all(tmp.as_bytes())?;
                 rc.writer.write_all(b"\"")?;

@@ -5,15 +5,15 @@ use std::borrow::Cow;
 use std::collections::{HashMap, HashSet};
 use std::path::Path;
 
-use self::elasticlunr::Index;
 use pulldown_cmark::*;
 use serde_json;
+use self::elasticlunr::Index;
 
 use book::{Book, BookItem};
 use config::Search;
 use errors::*;
-use utils;
 use theme::searcher;
+use utils;
 
 /// Creates all files required for search.
 pub fn create_files(search_config: &Search, destination: &Path, book: &Book) -> Result<()> {
@@ -32,7 +32,11 @@ pub fn create_files(search_config: &Search, destination: &Path, book: &Book) -> Result<()> {
     if search_config.copy_js {
         utils::fs::write_file(destination, "searchindex.json", index.as_bytes())?;
-        utils::fs::write_file(destination, "searchindex.js", format!("window.search = {};", index).as_bytes())?;
+        utils::fs::write_file(
+            destination,
+            "searchindex.js",
+            format!("window.search = {};", index).as_bytes(),
+        )?;
         utils::fs::write_file(destination, "searcher.js", searcher::JS)?;
         utils::fs::write_file(destination, "mark.min.js", searcher::MARK_JS)?;
         utils::fs::write_file(destination, "elasticlunr.min.js", searcher::ELASTICLUNR_JS)?;
@@ -45,8 +49,8 @@ pub fn create_files(search_config: &Search, destination: &Path, book: &Book) -> Result<()> {
 /// Uses the given arguments to construct a search document, then inserts it to the given index.
 fn add_doc(
     index: &mut Index,
     doc_urls: &mut Vec<String>,
     anchor_base: &str,
     section_id: &Option<String>,
     items: &[&str],
 ) {
@@ -166,8 +170,8 @@ fn render_item(
 }
 
 fn write_to_json(index: Index, search_config: &Search, doc_urls: Vec<String>) -> Result<String> {
-    use std::collections::BTreeMap;
     use self::elasticlunr::config::{SearchBool, SearchOptions, SearchOptionsField};
+    use std::collections::BTreeMap;
 
     #[derive(Serialize)]
     struct ResultsOptions {

@@ -15,16 +15,16 @@ pub use self::html_handlebars::HtmlHandlebars;
 mod html_handlebars;
 
+use serde_json;
+use shlex::Shlex;
 use std::fs;
 use std::io::{self, Read};
 use std::path::PathBuf;
 use std::process::{Command, Stdio};
-use serde_json;
-use shlex::Shlex;
 
-use errors::*;
-use config::Config;
 use book::Book;
+use config::Config;
+use errors::*;
 
 const MDBOOK_VERSION: &str = env!("CARGO_PKG_VERSION");
@@ -162,17 +162,21 @@ impl Renderer for CmdRenderer {
             .stdout(Stdio::inherit())
             .stderr(Stdio::inherit())
             .current_dir(&ctx.destination)
-            .spawn() {
+            .spawn()
+        {
             Ok(c) => c,
             Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
-                warn!("The command wasn't found, is the \"{}\" backend installed?", self.name);
+                warn!(
+                    "The command wasn't found, is the \"{}\" backend installed?",
+                    self.name
+                );
                 warn!("\tCommand: {}", self.cmd);
                 return Ok(());
             }
             Err(e) => {
                 return Err(e).chain_err(|| "Unable to start the backend")?;
             }
         };
 
         {
             let mut stdin = child.stdin.take().expect("Child has stdin");

@@ -5,9 +5,9 @@ pub mod playpen_editor;
 #[cfg(feature = "search")]
 pub mod searcher;
 
-use std::path::Path;
 use std::fs::File;
 use std::io::Read;
+use std::path::Path;
 
 use errors::*;
@@ -34,7 +34,6 @@
     include_bytes!("FontAwesome/fonts/fontawesome-webfont.woff2");
 pub static FONT_AWESOME_OTF: &'static [u8] = include_bytes!("FontAwesome/fonts/FontAwesome.otf");
 
-
 /// The `Theme` struct should be used instead of the static variables because
 /// the `new()` method will look if the user has a theme directory in their
 /// source folder and use the users theme instead of the default.
@@ -78,8 +77,14 @@ impl Theme {
             (theme_dir.join("highlight.js"), &mut theme.highlight_js),
             (theme_dir.join("clipboard.min.js"), &mut theme.clipboard_js),
             (theme_dir.join("highlight.css"), &mut theme.highlight_css),
-            (theme_dir.join("tomorrow-night.css"), &mut theme.tomorrow_night_css),
-            (theme_dir.join("ayu-highlight.css"), &mut theme.ayu_highlight_css),
+            (
+                theme_dir.join("tomorrow-night.css"),
+                &mut theme.tomorrow_night_css,
+            ),
+            (
+                theme_dir.join("ayu-highlight.css"),
+                &mut theme.ayu_highlight_css,
+            ),
         ];
 
         for (filename, dest) in files {
@@ -130,12 +135,11 @@ fn load_file_contents<P: AsRef<Path>>(filename: P, dest: &mut Vec<u8>) -> Result<()> {
     Ok(())
 }
 
-
 #[cfg(test)]
 mod tests {
     use super::*;
-    use tempfile::Builder as TempFileBuilder;
     use std::path::PathBuf;
+    use tempfile::Builder as TempFileBuilder;
 
     #[test]
     fn theme_uses_defaults_with_nonexistent_src_dir() {

@@ -25,16 +25,10 @@ pub fn normalize_path(path: &str) -> String {
 }
 
 /// Write the given data to a file, creating it first if necessary
-pub fn write_file<P: AsRef<Path>>(
-    build_dir: &Path,
-    filename: P,
-    content: &[u8],
-) -> Result<()> {
+pub fn write_file<P: AsRef<Path>>(build_dir: &Path, filename: P, content: &[u8]) -> Result<()> {
     let path = build_dir.join(filename);
 
-    create_file(&path)?
-        .write_all(content)
-        .map_err(|e| e.into())
+    create_file(&path)?.write_all(content).map_err(|e| e.into())
 }
 
 /// Takes a path and returns a path containing just enough `../` to point to

@@ -5,12 +5,13 @@ mod string;
 use errors::Error;
 use regex::Regex;
-use pulldown_cmark::{html, Event, Options, Parser, Tag, OPTION_ENABLE_FOOTNOTES,
-                     OPTION_ENABLE_TABLES};
+use pulldown_cmark::{
+    html, Event, Options, Parser, Tag, OPTION_ENABLE_FOOTNOTES, OPTION_ENABLE_TABLES,
+};
 use std::borrow::Cow;
 
-pub use self::string::{RangeArgument, take_lines};
+pub use self::string::{take_lines, RangeArgument};
 
 /// Replaces multiple consecutive whitespace characters with a single space character.
 pub fn collapse_whitespace<'a>(text: &'a str) -> Cow<'a, str> {
@@ -35,7 +36,10 @@ pub fn normalize_id(content: &str) -> String {
         })
         .collect::<String>();
     // Ensure that the first character is [A-Za-z]
-    if ret.chars().next().map_or(false, |c| !c.is_ascii_alphabetic()) {
+    if ret.chars()
+        .next()
+        .map_or(false, |c| !c.is_ascii_alphabetic())
+    {
         ret.insert(0, 'a');
     }
     ret
@@ -47,17 +51,19 @@ pub fn id_from_content(content: &str) -> String {
     let mut content = content.to_string();
 
     // Skip any tags or html-encoded stuff
-    const REPL_SUB: &[&str] = &["<em>",
-                                "</em>",
-                                "<code>",
-                                "</code>",
-                                "<strong>",
-                                "</strong>",
-                                "&lt;",
-                                "&gt;",
-                                "&amp;",
-                                "&#39;",
-                                "&quot;"];
+    const REPL_SUB: &[&str] = &[
+        "<em>",
+        "</em>",
+        "<code>",
+        "</code>",
+        "<strong>",
+        "</strong>",
+        "&lt;",
+        "&gt;",
+        "&amp;",
+        "&#39;",
+        "&quot;",
+    ];
     for sub in REPL_SUB {
         content = content.replace(sub, "");
     }
@@ -69,7 +75,6 @@ pub fn id_from_content(content: &str) -> String {
 }
 
-
 fn adjust_links(event: Event) -> Event {
     lazy_static! {
         static ref HTTP_LINK: Regex = Regex::new("^https?://").unwrap();
         static ref MD_LINK: Regex = Regex::new("(?P<link>.*).md(?P<anchor>#.*)?").unwrap();
@@ -79,22 +84,20 @@ fn adjust_links(event: Event) -> Event {
         Event::Start(Tag::Link(dest, title)) => {
             if !HTTP_LINK.is_match(&dest) {
                 if let Some(caps) = MD_LINK.captures(&dest) {
                     let mut html_link = [&caps["link"], ".html"].concat();
 
                     if let Some(anchor) = caps.name("anchor") {
                         html_link.push_str(anchor.as_str());
                     }
 
-                    return Event::Start(Tag::Link(Cow::from(html_link), title))
+                    return Event::Start(Tag::Link(Cow::from(html_link), title));
                 }
             }
 
             Event::Start(Tag::Link(dest, title))
-        },
-        _ => event
+        }
+        _ => event,
     }
 }
 
 /// Wrapper around the pulldown-cmark parser for rendering markdown to HTML.
@@ -108,8 +111,8 @@ pub fn render_markdown(text: &str, curly_quotes: bool) -> String {
     let p = Parser::new_ext(text, opts);
     let mut converter = EventQuoteConverter::new(curly_quotes);
     let events = p.map(clean_codeblock_headers)
         .map(adjust_links)
         .map(|event| converter.convert(event));
 
     html::push_html(&mut s, events);
     s
@@ -161,36 +164,36 @@ fn clean_codeblock_headers(event: Event) -> Event {
     }
 }
 
 fn convert_quotes_to_curly(original_text: &str) -> String {
     // We'll consider the start to be "whitespace".
     let mut preceded_by_whitespace = true;
 
-    original_text.chars()
+    original_text
+        .chars()
         .map(|original_char| {
             let converted_char = match original_char {
                 '\'' => {
                     if preceded_by_whitespace {
                         '‘'
                     } else {
                         '’'
                     }
                 }
                 '"' => {
                     if preceded_by_whitespace {
                         '“'
                     } else {
                         '”'
                     }
                 }
                 _ => original_char,
             };
 
             preceded_by_whitespace = original_char.is_whitespace();
             converted_char
         })
        .collect()
 }
 
 /// Prints a "backtrace" of some `Error`.
@@ -209,13 +212,22 @@ mod tests {
     #[test]
     fn preserves_external_links() {
-        assert_eq!(render_markdown("[example](https://www.rust-lang.org/)", false), "<p><a href=\"https://www.rust-lang.org/\">example</a></p>\n");
+        assert_eq!(
+            render_markdown("[example](https://www.rust-lang.org/)", false),
+            "<p><a href=\"https://www.rust-lang.org/\">example</a></p>\n"
+        );
     }
 
     #[test]
     fn it_can_adjust_markdown_links() {
-        assert_eq!(render_markdown("[example](example.md)", false), "<p><a href=\"example.html\">example</a></p>\n");
-        assert_eq!(render_markdown("[example_anchor](example.md#anchor)", false), "<p><a href=\"example.html#anchor\">example_anchor</a></p>\n");
+        assert_eq!(
+            render_markdown("[example](example.md)", false),
+            "<p><a href=\"example.html\">example</a></p>\n"
+        );
+        assert_eq!(
+            render_markdown("[example_anchor](example.md#anchor)", false),
+            "<p><a href=\"example.html#anchor\">example_anchor</a></p>\n"
+        );
     }
 
     #[test]
@@ -316,18 +328,26 @@ more text with spaces
     #[test]
     fn it_generates_anchors() {
-        assert_eq!(id_from_content("## `--passes`: add more rustdoc passes"),
-                   "a--passes-add-more-rustdoc-passes");
-        assert_eq!(id_from_content("## Method-call expressions"),
-                   "method-call-expressions");
+        assert_eq!(
+            id_from_content("## `--passes`: add more rustdoc passes"),
+            "a--passes-add-more-rustdoc-passes"
+        );
+        assert_eq!(
+            id_from_content("## Method-call expressions"),
+            "method-call-expressions"
+        );
     }
 
     #[test]
     fn it_normalizes_ids() {
-        assert_eq!(normalize_id("`--passes`: add more rustdoc passes"),
-                   "a--passes-add-more-rustdoc-passes");
-        assert_eq!(normalize_id("Method-call 🐙 expressions \u{1f47c}"),
-                   "method-call--expressions-");
+        assert_eq!(
+            normalize_id("`--passes`: add more rustdoc passes"),
+            "a--passes-add-more-rustdoc-passes"
+        );
+        assert_eq!(
+            normalize_id("Method-call 🐙 expressions \u{1f47c}"),
+            "method-call--expressions-"
+        );
         assert_eq!(normalize_id("_-_12345"), "a_-_12345");
         assert_eq!(normalize_id("12345"), "a12345");
         assert_eq!(normalize_id(""), "");
@@ -339,14 +359,18 @@ more text with spaces
     #[test]
     fn it_converts_single_quotes() {
-        assert_eq!(convert_quotes_to_curly("'one', 'two'"),
-                   "‘one’, ‘two’");
+        assert_eq!(
+            convert_quotes_to_curly("'one', 'two'"),
+            "‘one’, ‘two’"
+        );
     }
 
     #[test]
     fn it_converts_double_quotes() {
-        assert_eq!(convert_quotes_to_curly(r#""one", "two""#),
-                   "“one”, “two”");
+        assert_eq!(
+            convert_quotes_to_curly(r#""one", "two""#),
+            "“one”, “two”"
+        );
     }
 
     #[test]

@@ -1,5 +1,5 @@
-use std::ops::{Range, RangeFrom, RangeFull, RangeTo};
 use itertools::Itertools;
+use std::ops::{Range, RangeFrom, RangeFull, RangeTo};
 
 // This trait is already contained in the standard lib, however it is unstable.
 // TODO: Remove when the `collections_range` feature stabilises

@@ -3,11 +3,11 @@
 extern crate mdbook;
 extern crate tempfile;
 
-#[cfg(not(windows))]
-use std::path::Path;
-use tempfile::{TempDir, Builder as TempFileBuilder};
 use mdbook::config::Config;
 use mdbook::MDBook;
+#[cfg(not(windows))]
+use std::path::Path;
+use tempfile::{Builder as TempFileBuilder, TempDir};
 
 #[test]
 fn passing_alternate_backend() {
@@ -52,8 +52,8 @@ fn tee_command<P: AsRef<Path>>(out_file: P) -> String {
 #[test]
 #[cfg(not(windows))]
 fn backends_receive_render_context_via_stdin() {
-    use std::fs::File;
     use mdbook::renderer::RenderContext;
+    use std::fs::File;
 
     let temp = TempFileBuilder::new().prefix("output").tempdir().unwrap();
     let out_file = temp.path().join("out.txt");

@@ -7,19 +7,18 @@ extern crate mdbook;
 extern crate tempfile;
 extern crate walkdir;
 
-use std::path::Path;
-use std::fs::{self, File};
-use std::io::{Read, Write};
 use mdbook::errors::*;
 use mdbook::utils::fs::file_to_string;
+use std::fs::{self, File};
+use std::io::{Read, Write};
+use std::path::Path;
 
 // The funny `self::` here is because we've got an `extern crate ...` and are
 // in a submodule
-use self::tempfile::{TempDir, Builder as TempFileBuilder};
 use self::mdbook::MDBook;
+use self::tempfile::{Builder as TempFileBuilder, TempDir};
 use self::walkdir::WalkDir;
 
-
 /// Create a dummy book in a temporary directory, using the contents of
 /// `SUMMARY_MD` as a guide.
 ///
@@ -47,13 +46,16 @@ impl DummyBook {
     /// Write a book to a temporary directory using the provided settings.
     pub fn build(&self) -> Result<TempDir> {
-        let temp = TempFileBuilder::new().prefix("dummy_book").tempdir().chain_err(|| "Unable to create temp directory")?;
+        let temp = TempFileBuilder::new()
+            .prefix("dummy_book")
+            .tempdir()
+            .chain_err(|| "Unable to create temp directory")?;
 
         let dummy_book_root = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/dummy_book");
         recursive_copy(&dummy_book_root, temp.path()).chain_err(|| {
             "Couldn't copy files into a \
              temporary directory"
         })?;
 
         let sub_pattern = if self.passing_test { "true" } else { "false" };
         let file_containing_test = temp.path().join("src/first/nested.md");
@@ -77,11 +79,13 @@ pub fn assert_contains_strings<P: AsRef<Path>>(filename: P, strings: &[&str]) {
     let content = file_to_string(filename).expect("Couldn't read the file's contents");
 
     for s in strings {
-        assert!(content.contains(s),
-                "Searching for {:?} in {}\n\n{}",
-                s,
-                filename.display(),
-                content);
+        assert!(
+            content.contains(s),
+            "Searching for {:?} in {}\n\n{}",
+            s,
+            filename.display(),
+            content
+        );
     }
 }
@@ -90,15 +94,16 @@ pub fn assert_doesnt_contain_strings<P: AsRef<Path>>(filename: P, strings: &[&str]) {
     let content = file_to_string(filename).expect("Couldn't read the file's contents");
 
     for s in strings {
-        assert!(!content.contains(s),
-                "Found {:?} in {}\n\n{}",
-                s,
-                filename.display(),
-                content);
+        assert!(
+            !content.contains(s),
+            "Found {:?} in {}\n\n{}",
+            s,
+            filename.display(),
+            content
+        );
     }
 }
 
 /// Recursively copy an entire directory tree to somewhere else (a la `cp -r`).
 fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()> {
     let from = from.as_ref();
@@ -108,9 +113,9 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()> {
         let entry = entry.chain_err(|| "Unable to inspect directory entry")?;
         let original_location = entry.path();
-        let relative = original_location.strip_prefix(&from)
-            .expect("`original_location` is inside the `from` \
-                     directory");
+        let relative = original_location
+            .strip_prefix(&from)
+            .expect("`original_location` is inside the `from` directory");
         let new_location = to.join(relative);
 
         if original_location.is_file() {
@@ -118,9 +123,8 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()> {
                 fs::create_dir_all(parent).chain_err(|| "Couldn't create directory")?;
             }
 
-            fs::copy(&original_location, &new_location).chain_err(|| {
-                "Unable to copy file contents"
-            })?;
+            fs::copy(&original_location, &new_location)
+                .chain_err(|| "Unable to copy file contents")?;
         }
     }
@@ -129,7 +133,7 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()> {
 pub fn new_copy_of_example_book() -> Result<TempDir> {
     let temp = TempFileBuilder::new().prefix("book-example").tempdir()?;
     let book_example = Path::new(env!("CARGO_MANIFEST_DIR")).join("book-example");
 
     recursive_copy(book_example, temp.path())?;

@@ -1,13 +1,12 @@
 extern crate mdbook;
 extern crate tempfile;
 
-use std::path::PathBuf;
-use std::fs;
-use mdbook::MDBook;
 use mdbook::config::Config;
+use mdbook::MDBook;
+use std::fs;
+use std::path::PathBuf;
 use tempfile::Builder as TempFileBuilder;
 
-
 /// Run `mdbook init` in an empty directory and make sure the default files
 /// are created.
 #[test]

@@ -5,10 +5,10 @@ extern crate env_logger;
 extern crate error_chain;
 extern crate mdbook;
 
+use mdbook::book;
 use std::fs::File;
 use std::io::Read;
 use std::path::Path;
-use mdbook::book;
 
 macro_rules! summary_md_test {
     ($name:ident, $filename:expr) => {

@@ -9,18 +9,18 @@ mod dummy_book;
 use dummy_book::{assert_contains_strings, assert_doesnt_contain_strings, DummyBook};
 
+use mdbook::config::Config;
+use mdbook::errors::*;
+use mdbook::utils::fs::{file_to_string, write_file};
+use mdbook::MDBook;
+use select::document::Document;
+use select::predicate::{Class, Name, Predicate};
+use std::ffi::OsStr;
 use std::fs;
 use std::io::Write;
 use std::path::Path;
-use std::ffi::OsStr;
-use walkdir::{DirEntry, WalkDir};
-use select::document::Document;
-use select::predicate::{Class, Name, Predicate};
 use tempfile::Builder as TempFileBuilder;
-use mdbook::errors::*;
-use mdbook::utils::fs::{file_to_string, write_file};
-use mdbook::config::Config;
-use mdbook::MDBook;
+use walkdir::{DirEntry, WalkDir};
 
 const BOOK_ROOT: &'static str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/dummy_book");
 const TOC_TOP_LEVEL: &[&'static str] = &[
@@ -29,7 +29,8 @@ const TOC_TOP_LEVEL: &[&'static str] = &[
     "Conclusion",
     "Introduction",
 ];
-const TOC_SECOND_LEVEL: &[&'static str] = &["1.1. Nested Chapter", "1.2. Includes", "1.3. Recursive"];
+const TOC_SECOND_LEVEL: &[&'static str] =
+    &["1.1. Nested Chapter", "1.2. Includes", "1.3. Recursive"];
 
 /// Make sure you can load the dummy book and build it without panicking.
 #[test]
@@ -98,16 +99,12 @@ fn check_correct_cross_links_in_nested_dir() {
     assert_contains_strings(
         first.join("index.html"),
-        &[
-            r##"href="#some-section" id="some-section""##,
-        ],
+        &[r##"href="#some-section" id="some-section""##],
     );
     assert_contains_strings(
         first.join("nested.html"),
-        &[
-            r##"href="#some-section" id="some-section""##,
-        ],
+        &[r##"href="#some-section" id="some-section""##],
     );
 }
@@ -357,14 +354,12 @@ fn book_with_a_reserved_filename_does_not_build() {
 fn by_default_mdbook_use_index_preprocessor_to_convert_readme_to_index() {
     let temp = DummyBook::new().build().unwrap();
     let mut cfg = Config::default();
-    cfg.set("book.src", "src2").expect("Couldn't set config.book.src to \"src2\".");
+    cfg.set("book.src", "src2")
+        .expect("Couldn't set config.book.src to \"src2\".");
 
     let md = MDBook::load_with_config(temp.path(), cfg).unwrap();
     md.build().unwrap();
 
-    let first_index = temp.path()
-        .join("book")
-        .join("first")
-        .join("index.html");
+    let first_index = temp.path().join("book").join("first").join("index.html");
     let expected_strings = vec![
         r#"href="../first/index.html""#,
         r#"href="../second/index.html""#,
@@ -373,13 +368,8 @@ fn by_default_mdbook_use_index_preprocessor_to_convert_readme_to_index() {
     assert_contains_strings(&first_index, &expected_strings);
     assert_doesnt_contain_strings(&first_index, &vec!["README.html"]);
 
-    let second_index = temp.path()
-        .join("book")
-        .join("second")
-        .join("index.html");
-    let unexpected_strings = vec![
-        "Second README",
-    ];
+    let second_index = temp.path().join("book").join("second").join("index.html");
+    let unexpected_strings = vec!["Second README"];
     assert_doesnt_contain_strings(&second_index, &unexpected_strings);
 }
@@ -404,11 +394,11 @@ fn theme_dir_overrides_work_correctly() {
 #[cfg(feature = "search")]
 mod search {
     extern crate serde_json;
 
-    use std::fs::File;
-    use std::path::Path;
+    use dummy_book::DummyBook;
     use mdbook::utils::fs::file_to_string;
     use mdbook::MDBook;
-    use dummy_book::DummyBook;
+    use std::fs::File;
+    use std::path::Path;
 
     fn read_book_index(root: &Path) -> serde_json::Value {
         let index = root.join("book/searchindex.js");
@@ -427,12 +417,8 @@ mod search {
         let index = read_book_index(temp.path());
 
         let doc_urls = index["doc_urls"].as_array().unwrap();
-        let get_doc_ref = |url: &str| -> String {
-            doc_urls.iter()
-                .position(|s| s == url)
-                .unwrap()
-                .to_string()
-        };
+        let get_doc_ref =
+            |url: &str| -> String { doc_urls.iter().position(|s| s == url).unwrap().to_string() };
 
         let first_chapter = get_doc_ref("first/index.html#first-chapter");
         let introduction = get_doc_ref("intro.html#introduction");
@@ -453,14 +439,8 @@ mod search {
             docs[&summary]["body"],
             "Introduction First Chapter Nested Chapter Includes Recursive Second Chapter Conclusion"
         );
-        assert_eq!(
-            docs[&summary]["breadcrumbs"],
-            "First Chapter » Summary"
-        );
-        assert_eq!(
-            docs[&conclusion]["body"],
-            "I put &lt;HTML&gt; in here!"
-        );
+        assert_eq!(docs[&summary]["breadcrumbs"], "First Chapter » Summary");
+        assert_eq!(docs[&conclusion]["body"], "I put &lt;HTML&gt; in here!");
     }
 
     // Setting this to `true` may cause issues with `cargo watch`,

@@ -4,11 +4,11 @@ mod dummy_book;
 use dummy_book::DummyBook;
 
-use mdbook::MDBook;
-use mdbook::preprocess::{Preprocessor, PreprocessorContext};
 use mdbook::book::Book;
 use mdbook::config::Config;
 use mdbook::errors::*;
+use mdbook::preprocess::{Preprocessor, PreprocessorContext};
+use mdbook::MDBook;
 use std::sync::{Arc, Mutex};
@@ -30,7 +30,6 @@ fn mdbook_detects_book_with_failing_tests() {
 #[test]
 fn mdbook_runs_preprocessors() {
     let has_run: Arc<Mutex<bool>> = Arc::new(Mutex::new(false));
 
-
     struct DummyPreprocessor(Arc<Mutex<bool>>);