Update dependencies. (#1211)

* Removed the itertools dependency

* Removed an unused feature flag

* Stubbed out a toml_query replacement

* Update dependencies.

* Bump env_logger.

* Use warp instead of iron for http server.

Iron no longer appears to be maintained, whereas warp/hyper seems to be
reasonably well maintained. Unfortunately this adds a few seconds to the
compile time, but it shouldn't be too bad.

One benefit is that there is no longer a need for a separate websocket
port, which makes it easier to run multiple servers at once (see the sketch below).

* Update pulldown-cmark to 0.7

* Switch from error-chain to anyhow (sketched below).

* Bump MSRV to 1.39.

* Update elasticlunr-rs.

Co-authored-by: Michael Bryan <michaelfbryan@gmail.com>
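
As a concrete illustration of the single-port setup mentioned above, here is a stripped-down sketch assuming warp 0.2 and tokio 0.2 (the crates this commit pulls in). The `/__livereload` path matches the new serve code further down; the address, the empty websocket handler, and the `book` directory are illustrative, and the reload broadcasting is omitted:

```rust
use std::net::SocketAddr;
use warp::Filter;

#[tokio::main]
async fn main() {
    let address: SocketAddr = ([127, 0, 0, 1], 3000).into();

    // Websocket endpoint on the same port as the book itself, so the client
    // connects to ws://localhost:3000/__livereload instead of a second port.
    let livereload = warp::path("__livereload")
        .and(warp::ws())
        .map(|ws: warp::ws::Ws| ws.on_upgrade(|_socket| async {}));

    // The rendered book is served from the filesystem by the same server.
    let book = warp::fs::dir("book");

    warp::serve(livereload.or(book)).run(address).await;
}
```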
Author: Eric Huss, 2020-05-20 14:32:00 -07:00 (committed by GitHub)
Parent 5d5c55e619, commit 6c4c3448e3
26 changed files with 1328 additions and 1137 deletions
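
The error-handling change follows one recurring pattern throughout the diffs below: error-chain's `chain_err(|| ...)` becomes anyhow's `with_context(|| ...)`, and the old `error_chain!` ErrorKind variants become ad-hoc `bail!`/`anyhow!` errors. A minimal sketch of the pattern, illustrative only (the `read_summary` helper and its messages are invented for the example):

```rust
use anyhow::{bail, Context, Result};
use std::fs::File;
use std::io::Read;

// error-chain style:  File::open(path).chain_err(|| "Couldn't open SUMMARY.md")?
// anyhow style:       attach the same context with `with_context`.
fn read_summary(path: &str) -> Result<String> {
    let mut content = String::new();
    File::open(path)
        .with_context(|| format!("Couldn't open {}", path))?
        .read_to_string(&mut content)
        .with_context(|| "Couldn't read the file")?;

    if content.is_empty() {
        // Ad-hoc errors replace the old `error_chain!` ErrorKind variants.
        bail!("{} is empty", path);
    }

    Ok(content)
}
```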


@@ -31,7 +31,7 @@ jobs:
 rust: stable
 - build: msrv
 os: ubuntu-latest
-rust: 1.35.0
+rust: 1.39.0
 steps:
 - uses: actions/checkout@master
 - name: Install Rust

Cargo.lock (generated, 1936 lines changed): diff suppressed because it is too large.


@@ -16,17 +16,16 @@ repository = "https://github.com/rust-lang/mdBook"
 description = "Creates a book from markdown files"
 [dependencies]
+anyhow = "1.0.28"
 chrono = "0.4"
 clap = "2.24"
-env_logger = "0.6"
+env_logger = "0.7.1"
-error-chain = "0.12"
 handlebars = "3.0"
-itertools = "0.8"
 lazy_static = "1.0"
 log = "0.4"
 memchr = "2.0"
 open = "1.1"
-pulldown-cmark = "0.6.1"
+pulldown-cmark = "0.7.0"
 regex = "1.0.0"
 serde = "1.0"
 serde_derive = "1.0"
@@ -34,16 +33,15 @@ serde_json = "1.0"
 shlex = "0.1"
 tempfile = "3.0"
 toml = "0.5.1"
-toml-query = "0.9"
 # Watch feature
 notify = { version = "4.0", optional = true }
 gitignore = { version = "1.0", optional = true }
 # Serve feature
-iron = { version = "0.6", optional = true }
-staticfile = { version = "0.5", optional = true }
-ws = { version = "0.9", optional = true}
+futures-util = { version = "0.3.4", optional = true }
+tokio = { version = "0.2.18", features = ["macros"], optional = true }
+warp = { version = "0.2.2", default-features = false, features = ["websocket"], optional = true }
 # Search feature
 elasticlunr-rs = { version = "2.3", optional = true, default-features = false }
@@ -55,11 +53,9 @@ pretty_assertions = "0.6"
 walkdir = "2.0"
 [features]
-default = ["output", "watch", "serve", "search"]
+default = ["watch", "serve", "search"]
-debug = []
-output = []
 watch = ["notify", "gitignore"]
-serve = ["iron", "staticfile", "ws"]
+serve = ["futures-util", "tokio", "warp"]
 search = ["elasticlunr-rs", "ammonia"]
 [[bin]]


@@ -24,7 +24,7 @@ There are multiple ways to install mdBook.
 2. **From Crates.io**
-This requires at least [Rust] 1.35 and Cargo to be installed. Once you have installed
+This requires at least [Rust] 1.39 and Cargo to be installed. Once you have installed
 Rust, type the following in the terminal:
 ```


@@ -87,7 +87,7 @@ mod nop_lib {
 // particular config value
 if let Some(nop_cfg) = ctx.config.get_preprocessor(self.name()) {
 if nop_cfg.contains_key("blow-up") {
-return Err("Boom!!1!".into());
+anyhow::bail!("Boom!!1!");
 }
 }


@@ -15,13 +15,13 @@ pub fn load_book<P: AsRef<Path>>(src_dir: P, cfg: &BuildConfig) -> Result<Book>
 let mut summary_content = String::new();
 File::open(summary_md)
-.chain_err(|| "Couldn't open SUMMARY.md")?
+.with_context(|| "Couldn't open SUMMARY.md")?
 .read_to_string(&mut summary_content)?;
-let summary = parse_summary(&summary_content).chain_err(|| "Summary parsing failed")?;
+let summary = parse_summary(&summary_content).with_context(|| "Summary parsing failed")?;
 if cfg.create_missing {
-create_missing(&src_dir, &summary).chain_err(|| "Unable to create missing chapters")?;
+create_missing(&src_dir, &summary).with_context(|| "Unable to create missing chapters")?;
 }
 load_book_from_disk(&summary, src_dir)
@@ -257,11 +257,12 @@ fn load_chapter<P: AsRef<Path>>(
 };
 let mut f = File::open(&location)
-.chain_err(|| format!("Chapter file not found, {}", link_location.display()))?;
+.with_context(|| format!("Chapter file not found, {}", link_location.display()))?;
 let mut content = String::new();
-f.read_to_string(&mut content)
-.chain_err(|| format!("Unable to read \"{}\" ({})", link.name, location.display()))?;
+f.read_to_string(&mut content).with_context(|| {
+format!("Unable to read \"{}\" ({})", link.name, location.display())
+})?;
 let stripped = location
 .strip_prefix(&src_dir)


@@ -64,19 +64,19 @@ impl BookBuilder {
 info!("Creating a new book with stub content");
 self.create_directory_structure()
-.chain_err(|| "Unable to create directory structure")?;
+.with_context(|| "Unable to create directory structure")?;
 self.create_stub_files()
-.chain_err(|| "Unable to create stub files")?;
+.with_context(|| "Unable to create stub files")?;
 if self.create_gitignore {
 self.build_gitignore()
-.chain_err(|| "Unable to create .gitignore")?;
+.with_context(|| "Unable to create .gitignore")?;
 }
 if self.copy_theme {
 self.copy_across_theme()
-.chain_err(|| "Unable to copy across the theme")?;
+.with_context(|| "Unable to copy across the theme")?;
 }
 self.write_book_toml()?;
@@ -97,12 +97,12 @@ impl BookBuilder {
 fn write_book_toml(&self) -> Result<()> {
 debug!("Writing book.toml");
 let book_toml = self.root.join("book.toml");
-let cfg = toml::to_vec(&self.config).chain_err(|| "Unable to serialize the config")?;
+let cfg = toml::to_vec(&self.config).with_context(|| "Unable to serialize the config")?;
 File::create(book_toml)
-.chain_err(|| "Couldn't create book.toml")?
+.with_context(|| "Couldn't create book.toml")?
 .write_all(&cfg)
-.chain_err(|| "Unable to write config to book.toml")?;
+.with_context(|| "Unable to write config to book.toml")?;
 Ok(())
 }
@@ -174,13 +174,14 @@ impl BookBuilder {
 let summary = src_dir.join("SUMMARY.md");
 if !summary.exists() {
 trace!("No summary found creating stub summary and chapter_1.md.");
-let mut f = File::create(&summary).chain_err(|| "Unable to create SUMMARY.md")?;
+let mut f = File::create(&summary).with_context(|| "Unable to create SUMMARY.md")?;
 writeln!(f, "# Summary")?;
 writeln!(f)?;
 writeln!(f, "- [Chapter 1](./chapter_1.md)")?;
 let chapter_1 = src_dir.join("chapter_1.md");
-let mut f = File::create(&chapter_1).chain_err(|| "Unable to create chapter_1.md")?;
+let mut f =
+File::create(&chapter_1).with_context(|| "Unable to create chapter_1.md")?;
 writeln!(f, "# Chapter 1")?;
 } else {
 trace!("Existing summary found, no need to create stub files.");


@@ -215,7 +215,7 @@ impl MDBook {
 renderer
 .render(&render_context)
-.chain_err(|| "Rendering failed")
+.with_context(|| "Rendering failed")
 }
 /// You can change the default renderer to another one by using this method.
@@ -282,10 +282,12 @@ impl MDBook {
 let output = cmd.output()?;
 if !output.status.success() {
-bail!(ErrorKind::Subprocess(
-"Rustdoc returned an error".to_string(),
-output
-));
+bail!(
+"rustdoc returned an error:\n\
+\n--- stdout\n{}\n--- stderr\n{}",
+String::from_utf8_lossy(&output.stdout),
+String::from_utf8_lossy(&output.stderr)
+);
 }
 }
 }


@@ -236,13 +236,13 @@ impl<'a> SummaryParser<'a> {
 let prefix_chapters = self
 .parse_affix(true)
-.chain_err(|| "There was an error parsing the prefix chapters")?;
+.with_context(|| "There was an error parsing the prefix chapters")?;
 let numbered_chapters = self
 .parse_parts()
-.chain_err(|| "There was an error parsing the numbered chapters")?;
+.with_context(|| "There was an error parsing the numbered chapters")?;
 let suffix_chapters = self
 .parse_affix(false)
-.chain_err(|| "There was an error parsing the suffix chapters")?;
+.with_context(|| "There was an error parsing the suffix chapters")?;
 Ok(Summary {
 title,
@@ -320,7 +320,7 @@ impl<'a> SummaryParser<'a> {
 // Parse the rest of the part.
 let numbered_chapters = self
 .parse_numbered(&mut root_items, &mut root_number)
-.chain_err(|| "There was an error parsing the numbered chapters")?;
+.with_context(|| "There was an error parsing the numbered chapters")?;
 if let Some(title) = title {
 parts.push(SummaryItem::PartTitle(title));
@@ -514,8 +514,12 @@ impl<'a> SummaryParser<'a> {
 fn parse_error<D: Display>(&self, msg: D) -> Error {
 let (line, col) = self.current_location();
-ErrorKind::ParseError(line, col, msg.to_string()).into()
+anyhow::anyhow!(
+"failed to parse SUMMARY.md line {}, column {}: {}",
+line,
+col,
+msg
+)
 }
 /// Try to parse the title line.
@@ -553,10 +557,9 @@ fn get_last_link(links: &mut [SummaryItem]) -> Result<(usize, &mut Link)> {
 .filter_map(|(i, item)| item.maybe_link_mut().map(|l| (i, l)))
 .rev()
 .next()
-.ok_or_else(|| {
-"Unable to get last link because the list of SummaryItems doesn't contain any Links"
-.into()
-})
+.ok_or_else(||
+anyhow::anyhow!("Unable to get last link because the list of SummaryItems doesn't contain any Links")
+)
 }
 /// Removes the styling from a list of Markdown events and returns just the


@@ -1,6 +1,6 @@
 use crate::get_book_dir;
+use anyhow::Context;
 use clap::{App, ArgMatches, SubCommand};
-use mdbook::errors::*;
 use mdbook::MDBook;
 use std::fs;
@@ -31,7 +31,8 @@ pub fn execute(args: &ArgMatches) -> mdbook::errors::Result<()> {
 };
 if dir_to_remove.exists() {
-fs::remove_dir_all(&dir_to_remove).chain_err(|| "Unable to remove the build directory")?;
+fs::remove_dir_all(&dir_to_remove)
+.with_context(|| "Unable to remove the build directory")?;
 }
 Ok(())


@@ -2,15 +2,19 @@
 use super::watch;
 use crate::{get_book_dir, open};
 use clap::{App, Arg, ArgMatches, SubCommand};
-use iron::headers;
-use iron::{status, AfterMiddleware, Chain, Iron, IronError, IronResult, Request, Response, Set};
+use futures_util::sink::SinkExt;
+use futures_util::StreamExt;
 use mdbook::errors::*;
 use mdbook::utils;
 use mdbook::MDBook;
+use std::net::{SocketAddr, ToSocketAddrs};
+use std::path::PathBuf;
+use tokio::sync::broadcast;
+use warp::ws::Message;
+use warp::Filter;
-struct ErrorRecover;
-struct NoCache;
+/// The HTTP endpoint for the websocket used to trigger reloads when a file changes.
+const LIVE_RELOAD_ENDPOINT: &str = "__livereload";
 // Create clap subcommand arguments
 pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
@@ -43,42 +47,21 @@ pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
 .empty_values(false)
 .help("Port to use for HTTP connections"),
 )
-.arg(
-Arg::with_name("websocket-hostname")
-.long("websocket-hostname")
-.takes_value(true)
-.empty_values(false)
-.help(
-"Hostname to connect to for WebSockets connections (Defaults to the HTTP hostname)",
-),
-)
-.arg(
-Arg::with_name("websocket-port")
-.short("w")
-.long("websocket-port")
-.takes_value(true)
-.default_value("3001")
-.empty_values(false)
-.help("Port to use for WebSockets livereload connections"),
-)
 .arg_from_usage("-o, --open 'Opens the book server in a web browser'")
 }
-// Watch command implementation
+// Serve command implementation
 pub fn execute(args: &ArgMatches) -> Result<()> {
 let book_dir = get_book_dir(args);
 let mut book = MDBook::load(&book_dir)?;
 let port = args.value_of("port").unwrap();
-let ws_port = args.value_of("websocket-port").unwrap();
 let hostname = args.value_of("hostname").unwrap();
-let public_address = args.value_of("websocket-hostname").unwrap_or(hostname);
 let open_browser = args.is_present("open");
 let address = format!("{}:{}", hostname, port);
-let ws_address = format!("{}:{}", hostname, ws_port);
-let livereload_url = format!("ws://{}:{}", public_address, ws_port);
+let livereload_url = format!("ws://{}/{}", address, LIVE_RELOAD_ENDPOINT);
 book.config
 .set("output.html.livereload-url", &livereload_url)?;
@@ -88,20 +71,18 @@ pub fn execute(args: &ArgMatches) -> Result<()> {
 book.build()?;
-let mut chain = Chain::new(staticfile::Static::new(book.build_dir_for("html")));
-chain.link_after(NoCache);
-chain.link_after(ErrorRecover);
-let _iron = Iron::new(chain)
-.http(&*address)
-.chain_err(|| "Unable to launch the server")?;
-let ws_server =
-ws::WebSocket::new(|_| |_| Ok(())).chain_err(|| "Unable to start the websocket")?;
-let broadcaster = ws_server.broadcaster();
-std::thread::spawn(move || {
-ws_server.listen(&*ws_address).unwrap();
+let sockaddr: SocketAddr = address
+.to_socket_addrs()?
+.next()
+.ok_or_else(|| anyhow::anyhow!("no address found for {}", address))?;
+let build_dir = book.build_dir_for("html");
+// A channel used to broadcast to any websockets to reload when a file changes.
+let (tx, _rx) = tokio::sync::broadcast::channel::<Message>(100);
+let reload_tx = tx.clone();
+let thread_handle = std::thread::spawn(move || {
+serve(build_dir, sockaddr, reload_tx);
 });
 let serving_url = format!("http://{}", address);
@@ -117,7 +98,6 @@ pub fn execute(args: &ArgMatches) -> Result<()> {
 info!("Building book...");
 // FIXME: This area is really ugly because we need to re-set livereload :(
 let result = MDBook::load(&book_dir)
 .and_then(|mut b| {
 b.config
@@ -130,30 +110,39 @@ pub fn execute(args: &ArgMatches) -> Result<()> {
 error!("Unable to load the book");
 utils::log_backtrace(&e);
 } else {
-let _ = broadcaster.send("reload");
+let _ = tx.send(Message::text("reload"));
 }
 });
+let _ = thread_handle.join();
 Ok(())
 }
-impl AfterMiddleware for NoCache {
-fn after(&self, _: &mut Request, mut res: Response) -> IronResult<Response> {
-res.headers.set(headers::CacheControl(vec![
-headers::CacheDirective::NoStore,
-headers::CacheDirective::MaxAge(0u32),
-]));
-Ok(res)
-}
-}
-impl AfterMiddleware for ErrorRecover {
-fn catch(&self, _: &mut Request, err: IronError) -> IronResult<Response> {
-match err.response.status {
-// each error will result in 404 response
-Some(_) => Ok(err.response.set(status::NotFound)),
-_ => Err(err),
-}
-}
+#[tokio::main]
+async fn serve(build_dir: PathBuf, address: SocketAddr, reload_tx: broadcast::Sender<Message>) {
+// A warp Filter which captures `reload_tx` and provides an `rx` copy to
+// receive reload messages.
+let sender = warp::any().map(move || reload_tx.subscribe());
+// A warp Filter to handle the livereload endpoint. This upgrades to a
+// websocket, and then waits for any filesystem change notifications, and
+// relays them over the websocket.
+let livereload = warp::path(LIVE_RELOAD_ENDPOINT)
+.and(warp::ws())
+.and(sender)
+.map(|ws: warp::ws::Ws, mut rx: broadcast::Receiver<Message>| {
+ws.on_upgrade(move |ws| async move {
+let (mut user_ws_tx, _user_ws_rx) = ws.split();
+trace!("websocket got connection");
+if let Ok(m) = rx.recv().await {
+trace!("notify of reload");
+let _ = user_ws_tx.send(m).await;
+}
+})
+});
+// A warp Filter that serves from the filesystem.
+let book_route = warp::fs::dir(build_dir);
+let routes = livereload.or(book_route);
+warp::serve(routes).run(address).await;
 }


@@ -57,12 +57,9 @@ use std::path::{Path, PathBuf};
 use std::str::FromStr;
 use toml::value::Table;
 use toml::{self, Value};
-use toml_query::delete::TomlValueDeleteExt;
-use toml_query::insert::TomlValueInsertExt;
-use toml_query::read::TomlValueReadExt;
 use crate::errors::*;
-use crate::utils;
+use crate::utils::{self, toml_ext::TomlExt};
 /// The overall configuration object for MDBook, essentially an in-memory
 /// representation of `book.toml`.
@@ -82,7 +79,7 @@ impl FromStr for Config {
 /// Load a `Config` from some string.
 fn from_str(src: &str) -> Result<Self> {
-toml::from_str(src).chain_err(|| Error::from("Invalid configuration file"))
+toml::from_str(src).with_context(|| "Invalid configuration file")
 }
 }
@@ -91,9 +88,9 @@ impl Config {
 pub fn from_disk<P: AsRef<Path>>(config_file: P) -> Result<Config> {
 let mut buffer = String::new();
 File::open(config_file)
-.chain_err(|| "Unable to open the configuration file")?
+.with_context(|| "Unable to open the configuration file")?
 .read_to_string(&mut buffer)
-.chain_err(|| "Couldn't read the file")?;
+.with_context(|| "Couldn't read the file")?;
 Config::from_str(&buffer)
 }
@@ -163,15 +160,12 @@ impl Config {
 /// `output.html.playpen` will fetch the "playpen" out of the html output
 /// table).
 pub fn get(&self, key: &str) -> Option<&Value> {
-self.rest.read(key).unwrap_or(None)
+self.rest.read(key)
 }
 /// Fetch a value from the `Config` so you can mutate it.
 pub fn get_mut(&mut self, key: &str) -> Option<&mut Value> {
-match self.rest.read_mut(key) {
-Ok(inner) => inner,
-Err(_) => None,
-}
+self.rest.read_mut(key)
 }
 /// Convenience method for getting the html renderer's configuration.
@@ -182,11 +176,14 @@ impl Config {
 /// HTML renderer is refactored to be less coupled to `mdbook` internals.
 #[doc(hidden)]
 pub fn html_config(&self) -> Option<HtmlConfig> {
-match self.get_deserialized_opt("output.html") {
+match self
+.get_deserialized_opt("output.html")
+.with_context(|| "Parsing configuration [output.html]")
+{
 Ok(Some(config)) => Some(config),
 Ok(None) => None,
 Err(e) => {
-utils::log_backtrace(&e.chain_err(|| "Parsing configuration [output.html]"));
+utils::log_backtrace(&e);
 None
 }
 }
@@ -214,7 +211,7 @@ impl Config {
 value
 .clone()
 .try_into()
-.chain_err(|| "Couldn't deserialize the value")
+.with_context(|| "Couldn't deserialize the value")
 })
 .transpose()
 }
@@ -226,17 +223,15 @@ impl Config {
 pub fn set<S: Serialize, I: AsRef<str>>(&mut self, index: I, value: S) -> Result<()> {
 let index = index.as_ref();
-let value =
-Value::try_from(value).chain_err(|| "Unable to represent the item as a JSON Value")?;
+let value = Value::try_from(value)
+.with_context(|| "Unable to represent the item as a JSON Value")?;
 if index.starts_with("book.") {
 self.book.update_value(&index[5..], value);
 } else if index.starts_with("build.") {
 self.build.update_value(&index[6..], value);
 } else {
-self.rest
-.insert(index, value)
-.map_err(ErrorKind::TomlQueryError)?;
+self.rest.insert(index, value);
 }
 Ok(())
@@ -277,7 +272,7 @@ impl Config {
 get_and_insert!(table, "source" => cfg.book.src);
 get_and_insert!(table, "description" => cfg.book.description);
-if let Ok(Some(dest)) = table.delete("output.html.destination") {
+if let Some(dest) = table.delete("output.html.destination") {
 if let Ok(destination) = dest.try_into() {
 cfg.build.build_dir = destination;
 }
@@ -363,8 +358,8 @@ impl Serialize for Config {
 };
 let rust_config = Value::try_from(&self.rust).expect("should always be serializable");
-table.insert("book", book_config).expect("unreachable");
-table.insert("rust", rust_config).expect("unreachable");
+table.insert("book", book_config);
+table.insert("rust", rust_config);
 table.serialize(s)
 }
 }
@@ -391,7 +386,7 @@ fn is_legacy_format(table: &Value) -> bool {
 ];
 for item in &legacy_items {
-if let Ok(Some(_)) = table.read(item) {
+if table.read(item).is_some() {
 return true;
 }
 }


@@ -84,8 +84,6 @@
 #![deny(rust_2018_idioms)]
 #![allow(clippy::comparison_chain)]
-#[macro_use]
-extern crate error_chain;
 #[macro_use]
 extern crate lazy_static;
 #[macro_use]
@@ -119,48 +117,6 @@ pub use crate::renderer::Renderer;
 /// The error types used through out this crate.
 pub mod errors {
-use std::path::PathBuf;
+pub(crate) use anyhow::{bail, ensure, Context};
+pub use anyhow::{Error, Result};
-error_chain! {
-foreign_links {
-Io(std::io::Error) #[doc = "A wrapper around `std::io::Error`"];
-HandlebarsRender(handlebars::RenderError) #[doc = "Handlebars rendering failed"];
-HandlebarsTemplate(Box<handlebars::TemplateError>) #[doc = "Unable to parse the template"];
-Utf8(std::string::FromUtf8Error) #[doc = "Invalid UTF-8"];
-SerdeJson(serde_json::Error) #[doc = "JSON conversion failed"];
-}
-errors {
-/// A subprocess exited with an unsuccessful return code.
-Subprocess(message: String, output: std::process::Output) {
-description("A subprocess failed")
-display("{}: {}", message, String::from_utf8_lossy(&output.stdout))
-}
-/// An error was encountered while parsing the `SUMMARY.md` file.
-ParseError(line: usize, col: usize, message: String) {
-description("A SUMMARY.md parsing error")
-display("Error at line {}, column {}: {}", line, col, message)
-}
-/// The user tried to use a reserved filename.
-ReservedFilenameError(filename: PathBuf) {
-description("Reserved Filename")
-display("{} is reserved for internal use", filename.display())
-}
-/// Error with a TOML file.
-TomlQueryError(inner: toml_query::error::Error) {
-description("toml_query error")
-display("{}", inner)
-}
-}
-}
-// Box to halve the size of Error
-impl From<handlebars::TemplateError> for Error {
-fn from(e: handlebars::TemplateError) -> Error {
-From::from(Box::new(e))
-}
-}
 }


@@ -43,7 +43,7 @@ impl CmdPreprocessor {
 /// A convenience function custom preprocessors can use to parse the input
 /// written to `stdin` by a `CmdRenderer`.
 pub fn parse_input<R: Read>(reader: R) -> Result<(PreprocessorContext, Book)> {
-serde_json::from_reader(reader).chain_err(|| "Unable to parse the input")
+serde_json::from_reader(reader).with_context(|| "Unable to parse the input")
 }
 fn write_input_to_child(&self, child: &mut Child, book: &Book, ctx: &PreprocessorContext) {
@@ -100,7 +100,7 @@ impl Preprocessor for CmdPreprocessor {
 .stdout(Stdio::piped())
 .stderr(Stdio::inherit())
 .spawn()
-.chain_err(|| {
+.with_context(|| {
 format!(
 "Unable to start the \"{}\" preprocessor. Is it installed?",
 self.name()
@@ -111,7 +111,7 @@ impl Preprocessor for CmdPreprocessor {
 let output = child
 .wait_with_output()
-.chain_err(|| "Error waiting for the preprocessor to complete")?;
+.with_context(|| "Error waiting for the preprocessor to complete")?;
 trace!("{} exited with output: {:?}", self.cmd, output);
 ensure!(
@@ -119,7 +119,8 @@ impl Preprocessor for CmdPreprocessor {
 "The preprocessor exited unsuccessfully"
 );
-serde_json::from_slice(&output.stdout).chain_err(|| "Unable to parse the preprocessed book")
+serde_json::from_slice(&output.stdout)
+.with_context(|| "Unable to parse the preprocessed book")
 }
 fn supports_renderer(&self, renderer: &str) -> bool {


@@ -95,7 +95,7 @@ where
 }
 Err(e) => {
 error!("Error updating \"{}\", {}", link.link_text, e);
-for cause in e.iter().skip(1) {
+for cause in e.chain().skip(1) {
 warn!("Caused By: {}", cause);
 }
@@ -296,7 +296,7 @@ impl<'a> Link<'a> {
 RangeOrAnchor::Range(range) => take_lines(&s, range.clone()),
 RangeOrAnchor::Anchor(anchor) => take_anchored_lines(&s, anchor),
 })
-.chain_err(|| {
+.with_context(|| {
 format!(
 "Could not read file for link {} ({})",
 self.link_text,
@@ -316,7 +316,7 @@ impl<'a> Link<'a> {
 take_rustdoc_include_anchored_lines(&s, anchor)
 }
 })
-.chain_err(|| {
+.with_context(|| {
 format!(
 "Could not read file for link {} ({})",
 self.link_text,
@@ -327,7 +327,7 @@ impl<'a> Link<'a> {
 LinkType::Playpen(ref pat, ref attrs) => {
 let target = base.join(pat);
-let contents = fs::read_to_string(&target).chain_err(|| {
+let contents = fs::read_to_string(&target).with_context(|| {
 format!(
 "Could not read file for link {} ({})",
 self.link_text,


@@ -49,12 +49,12 @@ impl HtmlHandlebars {
 // Update the context with data for this file
 let ctx_path = path
 .to_str()
-.chain_err(|| "Could not convert path to str")?;
+.with_context(|| "Could not convert path to str")?;
 let filepath = Path::new(&ctx_path).with_extension("html");
 // "print.html" is used for the print page.
 if path == Path::new("print.md") {
-bail!(ErrorKind::ReservedFilenameError(path.clone()));
+bail!("{} is reserved for internal use", path.display());
 };
 let book_title = ctx
@@ -260,7 +260,7 @@ impl HtmlHandlebars {
 let output_location = destination.join(custom_file);
 if let Some(parent) = output_location.parent() {
 fs::create_dir_all(parent)
-.chain_err(|| format!("Unable to create {}", parent.display()))?;
+.with_context(|| format!("Unable to create {}", parent.display()))?;
 }
 debug!(
 "Copying {} -> {}",
@@ -268,7 +268,7 @@ impl HtmlHandlebars {
 output_location.display()
 );
-fs::copy(&input_location, &output_location).chain_err(|| {
+fs::copy(&input_location, &output_location).with_context(|| {
 format!(
 "Unable to copy {} to {}",
 input_location.display(),
@@ -314,7 +314,7 @@ impl Renderer for HtmlHandlebars {
 if destination.exists() {
 utils::fs::remove_dir_content(destination)
-.chain_err(|| "Unable to remove stale HTML output")?;
+.with_context(|| "Unable to remove stale HTML output")?;
 }
 trace!("render");
@@ -355,7 +355,7 @@ impl Renderer for HtmlHandlebars {
 let mut print_content = String::new();
 fs::create_dir_all(&destination)
-.chain_err(|| "Unexpected error when constructing destination path")?;
+.with_context(|| "Unexpected error when constructing destination path")?;
 let mut is_index = true;
 for item in book.iter() {
@@ -388,9 +388,9 @@ impl Renderer for HtmlHandlebars {
 debug!("Copy static files");
 self.copy_static_files(&destination, &theme, &html_config)
-.chain_err(|| "Unable to copy across static files")?;
+.with_context(|| "Unable to copy across static files")?;
 self.copy_additional_css_and_js(&html_config, &ctx.root, &destination)
-.chain_err(|| "Unable to copy across additional CSS and JS")?;
+.with_context(|| "Unable to copy across additional CSS and JS")?;
 // Render search index
 #[cfg(feature = "search")]
@@ -549,7 +549,7 @@ fn make_data(
 if let Some(ref path) = ch.path {
 let p = path
 .to_str()
-.chain_err(|| "Could not convert path to str")?;
+.with_context(|| "Could not convert path to str")?;
 chapter.insert("path".to_owned(), json!(p));
 }
 }


@@ -82,7 +82,7 @@ fn render_item(
 let filepath = Path::new(&chapter_path).with_extension("html");
 let filepath = filepath
 .to_str()
-.chain_err(|| "Could not convert HTML path to str")?;
+.with_context(|| "Could not convert HTML path to str")?;
 let anchor_base = utils::fs::normalize_path(filepath);
 let mut p = utils::new_cmark_parser(&chapter.content).peekable();


@@ -28,7 +28,7 @@ impl Renderer for MarkdownRenderer {
 if destination.exists() {
 utils::fs::remove_dir_content(destination)
-.chain_err(|| "Unable to remove stale Markdown output")?;
+.with_context(|| "Unable to remove stale Markdown output")?;
 }
 trace!("markdown render");
@@ -45,7 +45,7 @@ impl Renderer for MarkdownRenderer {
 }
 fs::create_dir_all(&destination)
-.chain_err(|| "Unexpected error when constructing destination path")?;
+.with_context(|| "Unexpected error when constructing destination path")?;
 Ok(())
 }


@@ -94,7 +94,7 @@ impl RenderContext {
 /// Load a `RenderContext` from its JSON representation.
 pub fn from_json<R: Read>(reader: R) -> Result<RenderContext> {
-serde_json::from_reader(reader).chain_err(|| "Unable to deserialize the `RenderContext`")
+serde_json::from_reader(reader).with_context(|| "Unable to deserialize the `RenderContext`")
 }
 }
@@ -178,7 +178,7 @@ impl CmdRenderer {
 );
 }
 }
-Err(error).chain_err(|| "Unable to start the backend")?
+Err(error).with_context(|| "Unable to start the backend")?
 }
 }
@@ -216,7 +216,7 @@ impl Renderer for CmdRenderer {
 let status = child
 .wait()
-.chain_err(|| "Error waiting for the backend to complete")?;
+.with_context(|| "Error waiting for the backend to complete")?;
 trace!("{} exited with output: {:?}", self.cmd, status);


@@ -2,10 +2,11 @@
 pub mod fs;
 mod string;
+pub(crate) mod toml_ext;
 use crate::errors::Error;
 use regex::Regex;
-use pulldown_cmark::{html, CowStr, Event, Options, Parser, Tag};
+use pulldown_cmark::{html, CodeBlockKind, CowStr, Event, Options, Parser, Tag};
 use std::borrow::Cow;
 use std::fmt::Write;
@@ -226,10 +227,10 @@ impl EventQuoteConverter {
 fn clean_codeblock_headers(event: Event<'_>) -> Event<'_> {
 match event {
-Event::Start(Tag::CodeBlock(ref info)) => {
+Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(ref info))) => {
 let info: String = info.chars().filter(|ch| !ch.is_whitespace()).collect();
-Event::Start(Tag::CodeBlock(CowStr::from(info)))
+Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(CowStr::from(info))))
 }
 _ => event,
 }
@@ -271,7 +272,7 @@ fn convert_quotes_to_curly(original_text: &str) -> String {
 pub fn log_backtrace(e: &Error) {
 error!("Error: {}", e);
-for cause in e.iter().skip(1) {
+for cause in e.chain().skip(1) {
 error!("\tCaused By: {}", cause);
 }
 }


@@ -1,4 +1,3 @@
-use itertools::Itertools;
 use regex::Regex;
 use std::ops::Bound::{Excluded, Included, Unbounded};
 use std::ops::RangeBounds;
@@ -10,11 +9,17 @@ pub fn take_lines<R: RangeBounds<usize>>(s: &str, range: R) -> String {
 Included(&n) => n,
 Unbounded => 0,
 };
-let mut lines = s.lines().skip(start);
+let lines = s.lines().skip(start);
 match range.end_bound() {
-Excluded(end) => lines.take(end.saturating_sub(start)).join("\n"),
-Included(end) => lines.take((end + 1).saturating_sub(start)).join("\n"),
-Unbounded => lines.join("\n"),
+Excluded(end) => lines
+.take(end.saturating_sub(start))
+.collect::<Vec<_>>()
+.join("\n"),
+Included(end) => lines
+.take((end + 1).saturating_sub(start))
+.collect::<Vec<_>>()
+.join("\n"),
+Unbounded => lines.collect::<Vec<_>>().join("\n"),
 }
 }

src/utils/toml_ext.rs (new file, 130 lines)

@@ -0,0 +1,130 @@
use toml::value::{Table, Value};

pub(crate) trait TomlExt {
    fn read(&self, key: &str) -> Option<&Value>;
    fn read_mut(&mut self, key: &str) -> Option<&mut Value>;
    fn insert(&mut self, key: &str, value: Value);
    fn delete(&mut self, key: &str) -> Option<Value>;
}

impl TomlExt for Value {
    fn read(&self, key: &str) -> Option<&Value> {
        if let Some((head, tail)) = split(key) {
            self.get(head)?.read(tail)
        } else {
            self.get(key)
        }
    }

    fn read_mut(&mut self, key: &str) -> Option<&mut Value> {
        if let Some((head, tail)) = split(key) {
            self.get_mut(head)?.read_mut(tail)
        } else {
            self.get_mut(key)
        }
    }

    fn insert(&mut self, key: &str, value: Value) {
        if !self.is_table() {
            *self = Value::Table(Table::new());
        }

        let table = self.as_table_mut().expect("unreachable");

        if let Some((head, tail)) = split(key) {
            table
                .entry(head)
                .or_insert_with(|| Value::Table(Table::new()))
                .insert(tail, value);
        } else {
            table.insert(key.to_string(), value);
        }
    }

    fn delete(&mut self, key: &str) -> Option<Value> {
        if let Some((head, tail)) = split(key) {
            self.get_mut(head)?.delete(tail)
        } else if let Some(table) = self.as_table_mut() {
            table.remove(key)
        } else {
            None
        }
    }
}

fn split(key: &str) -> Option<(&str, &str)> {
    let ix = key.find(".")?;

    let (head, tail) = key.split_at(ix);
    // splitting will leave the "."
    let tail = &tail[1..];

    Some((head, tail))
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::str::FromStr;

    #[test]
    fn read_simple_table() {
        let src = "[table]";
        let value = Value::from_str(src).unwrap();

        let got = value.read("table").unwrap();

        assert!(got.is_table());
    }

    #[test]
    fn read_nested_item() {
        let src = "[table]\nnested=true";
        let value = Value::from_str(src).unwrap();

        let got = value.read("table.nested").unwrap();

        assert_eq!(got, &Value::Boolean(true));
    }

    #[test]
    fn insert_item_at_top_level() {
        let mut value = Value::Table(Table::default());
        let item = Value::Boolean(true);

        value.insert("first", item.clone());

        assert_eq!(value.get("first").unwrap(), &item);
    }

    #[test]
    fn insert_nested_item() {
        let mut value = Value::Table(Table::default());
        let item = Value::Boolean(true);

        value.insert("first.second", item.clone());

        let inserted = value.read("first.second").unwrap();
        assert_eq!(inserted, &item);
    }

    #[test]
    fn delete_a_top_level_item() {
        let src = "top = true";
        let mut value = Value::from_str(src).unwrap();

        let got = value.delete("top").unwrap();

        assert_eq!(got, Value::Boolean(true));
    }

    #[test]
    fn delete_a_nested_item() {
        let src = "[table]\n nested = true";
        let mut value = Value::from_str(src).unwrap();

        let got = value.delete("table.nested").unwrap();

        assert_eq!(got, Value::Boolean(true));
    }
}


@@ -4,12 +4,12 @@
 // Not all features are used in all test crates, so...
 #![allow(dead_code, unused_variables, unused_imports, unused_extern_crates)]
+use anyhow::Context;
 use mdbook::errors::*;
+use mdbook::MDBook;
 use std::fs::{self, File};
 use std::io::{Read, Write};
 use std::path::Path;
-use mdbook::MDBook;
 use tempfile::{Builder as TempFileBuilder, TempDir};
 use walkdir::WalkDir;
@@ -43,10 +43,10 @@ impl DummyBook {
 let temp = TempFileBuilder::new()
 .prefix("dummy_book-")
 .tempdir()
-.chain_err(|| "Unable to create temp directory")?;
+.with_context(|| "Unable to create temp directory")?;
 let dummy_book_root = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/dummy_book");
-recursive_copy(&dummy_book_root, temp.path()).chain_err(|| {
+recursive_copy(&dummy_book_root, temp.path()).with_context(|| {
 "Couldn't copy files into a \
 temporary directory"
 })?;
@@ -113,7 +113,7 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()>
 let to = to.as_ref();
 for entry in WalkDir::new(&from) {
-let entry = entry.chain_err(|| "Unable to inspect directory entry")?;
+let entry = entry.with_context(|| "Unable to inspect directory entry")?;
 let original_location = entry.path();
 let relative = original_location
@@ -123,11 +123,11 @@ fn recursive_copy<A: AsRef<Path>, B: AsRef<Path>>(from: A, to: B) -> Result<()>
 if original_location.is_file() {
 if let Some(parent) = new_location.parent() {
-fs::create_dir_all(parent).chain_err(|| "Couldn't create directory")?;
+fs::create_dir_all(parent).with_context(|| "Couldn't create directory")?;
 }
 fs::copy(&original_location, &new_location)
-.chain_err(|| "Unable to copy file contents")?;
+.with_context(|| "Unable to copy file contents")?;
 }
 }


@@ -28,11 +28,9 @@ macro_rules! summary_md_test {
 .unwrap();
 if let Err(e) = book::parse_summary(&content) {
-use error_chain::ChainedError;
 eprintln!("Error parsing {}", filename.display());
 eprintln!();
-eprintln!("{}", e.display_chain());
+eprintln!("{:?}", e);
 panic!();
 }
 }


@@ -5,6 +5,7 @@ mod dummy_book;
 use crate::dummy_book::{assert_contains_strings, assert_doesnt_contain_strings, DummyBook};
+use anyhow::Context;
 use mdbook::config::Config;
 use mdbook::errors::*;
 use mdbook::utils::fs::write_file;
@@ -247,13 +248,13 @@ fn entry_ends_with(entry: &DirEntry, ending: &str) -> bool {
 fn root_index_html() -> Result<Document> {
 let temp = DummyBook::new()
 .build()
-.chain_err(|| "Couldn't create the dummy book")?;
+.with_context(|| "Couldn't create the dummy book")?;
 MDBook::load(temp.path())?
 .build()
-.chain_err(|| "Book building failed")?;
+.with_context(|| "Book building failed")?;
 let index_page = temp.path().join("book").join("index.html");
-let html = fs::read_to_string(&index_page).chain_err(|| "Unable to read index.html")?;
+let html = fs::read_to_string(&index_page).with_context(|| "Unable to read index.html")?;
 Ok(Document::from(html.as_str()))
 }


@@ -6350,6 +6350,7 @@
 }
 }
 },
+"lang": "English",
 "pipeline": [
 "trimmer",
 "stopWordFilter",