parent 762d89ebbf
commit 1d69ccae48
@@ -245,7 +245,8 @@ fn load_chapter<P: AsRef<Path>>(
     ch.number = link.number.clone();
 
     sub_item_parents.push(link.name.clone());
-    let sub_items = link.nested_items
+    let sub_items = link
+        .nested_items
         .iter()
         .map(|i| load_summary_item(i, src_dir, sub_item_parents.clone()))
         .collect::<Result<Vec<_>>>()?;
@@ -475,7 +476,8 @@ And here is some \
         assert_eq!(got.len(), 5);
 
         // checking the chapter names are in the order should be sufficient here...
-        let chapter_names: Vec<String> = got.into_iter()
+        let chapter_names: Vec<String> = got
+            .into_iter()
             .filter_map(|i| match *i {
                 BookItem::Chapter(ref ch) => Some(ch.name.clone()),
                 _ => None,

@@ -110,7 +110,8 @@ impl BookBuilder {
     fn copy_across_theme(&self) -> Result<()> {
         debug!("Copying theme");
 
-        let themedir = self.config
+        let themedir = self
+            .config
             .html_config()
             .and_then(|html| html.theme)
             .unwrap_or_else(|| self.config.book.src.join("theme"));

@@ -221,11 +221,14 @@ impl<'a> SummaryParser<'a> {
     fn parse(mut self) -> Result<Summary> {
         let title = self.parse_title();
 
-        let prefix_chapters = self.parse_affix(true)
+        let prefix_chapters = self
+            .parse_affix(true)
             .chain_err(|| "There was an error parsing the prefix chapters")?;
-        let numbered_chapters = self.parse_numbered()
+        let numbered_chapters = self
+            .parse_numbered()
             .chain_err(|| "There was an error parsing the numbered chapters")?;
-        let suffix_chapters = self.parse_affix(false)
+        let suffix_chapters = self
+            .parse_affix(false)
             .chain_err(|| "There was an error parsing the suffix chapters")?;
 
         Ok(Summary {

@@ -28,7 +28,8 @@ pub fn make_subcommand<'a, 'b>() -> App<'a, 'b> {
 
 // test command implementation
 pub fn execute(args: &ArgMatches) -> Result<()> {
-    let library_paths: Vec<&str> = args.values_of("library-path")
+    let library_paths: Vec<&str> = args
+        .values_of("library-path")
         .map(|v| v.collect())
         .unwrap_or_default();
     let book_dir = get_book_dir(args);

@@ -534,11 +534,7 @@ trait Updateable<'de>: Serialize + Deserialize<'de> {
     }
 }
 
-impl<'de, T> Updateable<'de> for T
-where
-    T: Serialize + Deserialize<'de>,
-{
-}
+impl<'de, T> Updateable<'de> for T where T: Serialize + Deserialize<'de> {}
 
 #[cfg(test)]
 mod tests {

@@ -37,7 +37,8 @@ impl HtmlHandlebars {
         print_content.push_str(&content);
 
         // Update the context with data for this file
-        let path = ch.path
+        let path = ch
+            .path
             .to_str()
             .chain_err(|| "Could not convert path to str")?;
         let filepath = Path::new(&ch.path).with_extension("html");
@@ -50,7 +51,8 @@ impl HtmlHandlebars {
         // Non-lexical lifetimes needed :'(
         let title: String;
         {
-            let book_title = ctx.data
+            let book_title = ctx
+                .data
                 .get("book_title")
                 .and_then(serde_json::Value::as_str)
                 .unwrap_or("");
@@ -465,7 +467,8 @@ fn make_data(
                 }
 
                 chapter.insert("name".to_owned(), json!(ch.name));
-                let path = ch.path
+                let path = ch
+                    .path
                     .to_str()
                     .chain_err(|| "Could not convert path to str")?;
                 chapter.insert("path".to_owned(), json!(path));

@@ -53,7 +53,8 @@ fn find_chapter(rc: &mut RenderContext, target: Target) -> Result<Option<StringM
             .map_err(|_| RenderError::new("Could not decode the JSON data"))
     })?;
 
-    let base_path = rc.evaluate_absolute("path", true)?
+    let base_path = rc
+        .evaluate_absolute("path", true)?
         .as_str()
         .ok_or_else(|| RenderError::new("Type error for `path`, string expected"))?
         .replace("\"", "");
@@ -89,7 +90,8 @@ fn render(
     trace!("Creating BTreeMap to inject in context");
 
     let mut context = BTreeMap::new();
-    let base_path = rc.evaluate_absolute("path", false)?
+    let base_path = rc
+        .evaluate_absolute("path", false)?
         .as_str()
         .ok_or_else(|| RenderError::new("Type error for `path`, string expected"))?
         .replace("\"", "");

@@ -22,7 +22,8 @@ impl HelperDef for RenderToc {
             serde_json::value::from_value::<Vec<BTreeMap<String, String>>>(c.clone())
                 .map_err(|_| RenderError::new("Could not decode the JSON data"))
         })?;
-        let current = rc.evaluate_absolute("path", true)?
+        let current = rc
+            .evaluate_absolute("path", true)?
            .as_str()
            .ok_or_else(|| RenderError::new("Type error for `path`, string expected"))?
            .replace("\"", "");

@@ -157,7 +157,8 @@ impl Renderer for CmdRenderer {
 
         let _ = fs::create_dir_all(&ctx.destination);
 
-        let mut child = match self.compose_command()?
+        let mut child = match self
+            .compose_command()?
             .stdin(Stdio::piped())
             .stdout(Stdio::inherit())
             .stderr(Stdio::inherit())

@@ -36,7 +36,8 @@ pub fn normalize_id(content: &str) -> String {
         })
         .collect::<String>();
     // Ensure that the first character is [A-Za-z]
-    if ret.chars()
+    if ret
+        .chars()
         .next()
         .map_or(false, |c| !c.is_ascii_alphabetic())
     {
@@ -110,7 +111,8 @@ pub fn render_markdown(text: &str, curly_quotes: bool) -> String {
 
     let p = Parser::new_ext(text, opts);
     let mut converter = EventQuoteConverter::new(curly_quotes);
-    let events = p.map(clean_codeblock_headers)
+    let events = p
+        .map(clean_codeblock_headers)
         .map(adjust_links)
         .map(|event| converter.convert(event));
 

@@ -174,7 +174,8 @@ fn chapter_files_were_rendered_to_html() {
         .filter(|path| path.file_name().and_then(OsStr::to_str) != Some("SUMMARY.md"));
 
     for chapter in chapter_files {
-        let rendered_location = temp.path()
+        let rendered_location = temp
+            .path()
             .join(chapter.strip_prefix(&src).unwrap())
             .with_extension("html");
         assert!(
@@ -213,7 +214,8 @@ fn check_second_toc_level() {
 
     let pred = descendants!(Class("chapter"), Name("li"), Name("li"), Name("a"));
 
-    let mut children_of_children: Vec<_> = doc.find(pred)
+    let mut children_of_children: Vec<_> = doc
+        .find(pred)
         .map(|elem| elem.text().trim().to_string())
         .collect();
     children_of_children.sort();
@@ -231,7 +233,8 @@ fn check_first_toc_level() {
 
     let pred = descendants!(Class("chapter"), Name("li"), Name("a"));
 
-    let mut children: Vec<_> = doc.find(pred)
+    let mut children: Vec<_> = doc
+        .find(pred)
         .map(|elem| elem.text().trim().to_string())
         .collect();
     children.sort();
@@ -244,7 +247,8 @@ fn check_spacers() {
     let doc = root_index_html().unwrap();
     let should_be = 1;
 
-    let num_spacers = doc.find(Class("chapter").descendant(Name("li").and(Class("spacer"))))
+    let num_spacers = doc
+        .find(Class("chapter").descendant(Name("li").and(Class("spacer"))))
         .count();
     assert_eq!(num_spacers, should_be);
 }