Merge branch 'master' into add_absolute_links_support
commit bcd63f97c5
@@ -116,7 +116,7 @@ If possible, do your best to avoid breaking older browser releases.
 Any change to the HTML or styling is encouraged to manually check on as many browsers and platforms that you can.
 Unfortunately at this time we don't have any automated UI or browser testing, so your assistance in testing is appreciated.
 
-## Updating higlight.js
+## Updating highlight.js
 
 The following are instructions for updating [highlight.js](https://highlightjs.org/).
 
@@ -809,10 +809,10 @@ dependencies = [
 "futures-util",
 "gitignore",
 "handlebars",
-"lazy_static",
 "log",
 "memchr",
 "notify",
+"once_cell",
 "opener",
 "predicates",
 "pretty_assertions",
@@ -997,6 +997,12 @@ dependencies = [
 "libc",
 ]
 
+[[package]]
+name = "once_cell"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+
 [[package]]
 name = "opaque-debug"
 version = "0.2.3"
@@ -20,9 +20,9 @@ anyhow = "1.0.28"
 chrono = "0.4"
 clap = { version = "3.0", features = ["cargo"] }
 clap_complete = "3.0"
+once_cell = "1"
 env_logger = "0.9.0"
 handlebars = "4.0"
-lazy_static = "1.0"
 log = "0.4"
 memchr = "2.0"
 opener = "0.5"
@@ -65,3 +65,7 @@ search = ["elasticlunr-rs", "ammonia"]
 [[bin]]
 doc = false
 name = "mdbook"
+
+[[example]]
+name = "nop-preprocessor"
+test = true
@@ -101,4 +101,58 @@ mod nop_lib {
             renderer != "not-supported"
         }
     }
+
+    #[cfg(test)]
+    mod test {
+        use super::*;
+
+        #[test]
+        fn nop_preprocessor_run() {
+            let input_json = r##"[
+                {
+                    "root": "/path/to/book",
+                    "config": {
+                        "book": {
+                            "authors": ["AUTHOR"],
+                            "language": "en",
+                            "multilingual": false,
+                            "src": "src",
+                            "title": "TITLE"
+                        },
+                        "preprocessor": {
+                            "nop": {}
+                        }
+                    },
+                    "renderer": "html",
+                    "mdbook_version": "0.4.21"
+                },
+                {
+                    "sections": [
+                        {
+                            "Chapter": {
+                                "name": "Chapter 1",
+                                "content": "# Chapter 1\n",
+                                "number": [1],
+                                "sub_items": [],
+                                "path": "chapter_1.md",
+                                "source_path": "chapter_1.md",
+                                "parent_names": []
+                            }
+                        }
+                    ],
+                    "__non_exhaustive": null
+                }
+            ]"##;
+            let input_json = input_json.as_bytes();
+
+            let (ctx, book) = mdbook::preprocess::CmdPreprocessor::parse_input(input_json).unwrap();
+            let expected_book = book.clone();
+            let result = Nop::new().run(&ctx, book);
+            assert!(result.is_ok());
+
+            // The nop-preprocessor should not have made any changes to the book content.
+            let actual_book = result.unwrap();
+            assert_eq!(actual_book, expected_book);
+        }
+    }
 }
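For orientation (commentary, not part of the diff): the new test drives the example preprocessor through `CmdPreprocessor::parse_input` and `Nop::new().run(...)`. A minimal sketch of the trait the test exercises, assuming the mdbook 0.4 `Preprocessor` API, might look like this:

```rust
// Sketch only: the shape of the Preprocessor trait exercised by
// `nop_preprocessor_run` above (mdbook 0.4 API; not part of this diff).
use mdbook::book::Book;
use mdbook::errors::Error;
use mdbook::preprocess::{Preprocessor, PreprocessorContext};

struct Nop;

impl Preprocessor for Nop {
    fn name(&self) -> &str {
        "nop-preprocessor"
    }

    // A no-op run: the book is returned unchanged, which is exactly what the
    // test checks with `assert_eq!(actual_book, expected_book)`.
    fn run(&self, _ctx: &PreprocessorContext, book: Book) -> Result<Book, Error> {
        Ok(book)
    }
}

fn main() {
    // Nothing to run here; the impl above is only to show the trait shape.
    let _ = Nop;
}
```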
@@ -454,7 +454,7 @@ impl<'a> SummaryParser<'a> {
                     items.push(item);
                 }
                 Some(Event::Start(Tag::List(..))) => {
-                    // Skip this tag after comment bacause it is not nested.
+                    // Skip this tag after comment because it is not nested.
                     if items.is_empty() {
                         continue;
                     }
@@ -89,8 +89,7 @@ pub fn execute(args: &ArgMatches) -> Result<()> {
     let input_404 = book
         .config
         .get("output.html.input-404")
-        .map(toml::Value::as_str)
-        .and_then(std::convert::identity) // flatten
+        .and_then(toml::Value::as_str)
         .map(ToString::to_string);
     let file_404 = get_404_output_file(&input_404);
 
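A side note on the `.and_then(toml::Value::as_str)` change above (commentary, not part of the diff): `toml::Value::as_str` returns an `Option`, so mapping it over another `Option` nests the types, while `and_then` flattens in one step. A standalone illustration, using `str::find` as a stand-in for `as_str`:

```rust
// Illustration only: why `and_then` replaces `map(...)` + `and_then(identity)`.
fn main() {
    let value: Option<&str> = Some("404.md");

    // `map` with an Option-returning function nests the Options...
    let nested: Option<Option<usize>> = value.map(|s| s.find('.'));
    // ...while `and_then` flattens as it goes.
    let flat: Option<usize> = value.and_then(|s| s.find('.'));

    assert_eq!(nested.flatten(), flat);
    assert_eq!(flat, Some(3));
}
```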
@@ -4,8 +4,8 @@ use std::path::Path;
 use super::{Preprocessor, PreprocessorContext};
 use crate::book::{Book, BookItem};
 use crate::errors::*;
-use lazy_static::lazy_static;
 use log::warn;
+use once_cell::sync::Lazy;
 
 /// A preprocessor for converting file name `README.md` to `index.md` since
 /// `README.md` is the de facto index file in markdown-based documentation.
@@ -68,9 +68,8 @@ fn warn_readme_name_conflict<P: AsRef<Path>>(readme_path: P, index_path: P) {
 }
 
 fn is_readme_file<P: AsRef<Path>>(path: P) -> bool {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"(?i)^readme$").unwrap();
-    }
+    static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)^readme$").unwrap());
+
     RE.is_match(
         path.as_ref()
             .file_stem()
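The hunks above and below all apply the same mechanical change, so a standalone sketch may help (the `GREETING_RE` name is made up, not from mdBook): `lazy_static!` blocks become plain `static` items of type `once_cell::sync::Lazy`, initialised by a closure that runs once on first use.

```rust
// Minimal sketch of the lazy_static -> once_cell migration pattern.
// Requires the `once_cell` and `regex` crates.
use once_cell::sync::Lazy;
use regex::Regex;

// Before: lazy_static! { static ref GREETING_RE: Regex = ...; }
// After: a plain static with an initialiser closure, no macro needed.
static GREETING_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)^hello\b").unwrap());

fn is_greeting(line: &str) -> bool {
    GREETING_RE.is_match(line)
}

fn main() {
    assert!(is_greeting("Hello, world"));
    assert!(!is_greeting("goodbye"));
}
```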
@@ -10,8 +10,8 @@ use std::path::{Path, PathBuf};
 
 use super::{Preprocessor, PreprocessorContext};
 use crate::book::{Book, BookItem};
-use lazy_static::lazy_static;
 use log::{error, warn};
+use once_cell::sync::Lazy;
 
 const ESCAPE_CHAR: char = '\\';
 const MAX_LINK_NESTED_DEPTH: usize = 10;
@@ -410,19 +410,20 @@ impl<'a> Iterator for LinkIter<'a> {
 fn find_links(contents: &str) -> LinkIter<'_> {
     // lazily compute following regex
     // r"\\\{\{#.*\}\}|\{\{#([a-zA-Z0-9]+)\s*([^}]+)\}\}")?;
-    lazy_static! {
-        static ref RE: Regex = Regex::new(
+    static RE: Lazy<Regex> = Lazy::new(|| {
+        Regex::new(
             r"(?x)              # insignificant whitespace mode
             \\\{\{\#.*\}\}      # match escaped link
             |                   # or
             \{\{\s*             # link opening parens and whitespace
             \#([a-zA-Z0-9_]+)   # link type
             \s+                 # separating whitespace
             ([^}]+)             # link target path and space separated properties
-            \}\}                # link closing parens"
+            \}\}                # link closing parens",
         )
-        .unwrap();
-    }
+        .unwrap()
+    });
 
     LinkIter(RE.captures_iter(contents))
 }
@@ -14,8 +14,8 @@ use std::path::{Path, PathBuf};
 
 use crate::utils::fs::get_404_output_file;
 use handlebars::Handlebars;
-use lazy_static::lazy_static;
 use log::{debug, trace, warn};
+use once_cell::sync::Lazy;
 use regex::{Captures, Regex};
 use serde_json::json;
 
@@ -780,9 +780,8 @@ fn make_data(
 /// Goes through the rendered HTML, making sure all header tags have
 /// an anchor respectively so people can link to sections directly.
 fn build_header_links(html: &str) -> String {
-    lazy_static! {
-        static ref BUILD_HEADER_LINKS: Regex = Regex::new(r"<h(\d)>(.*?)</h\d>").unwrap();
-    }
+    static BUILD_HEADER_LINKS: Lazy<Regex> =
+        Lazy::new(|| Regex::new(r"<h(\d)>(.*?)</h\d>").unwrap());
 
     let mut id_counter = HashMap::new();
 
@@ -823,10 +822,8 @@ fn insert_link_into_header(
 // ```
 // This function replaces all commas by spaces in the code block classes
 fn fix_code_blocks(html: &str) -> String {
-    lazy_static! {
-        static ref FIX_CODE_BLOCKS: Regex =
-            Regex::new(r##"<code([^>]+)class="([^"]+)"([^>]*)>"##).unwrap();
-    }
+    static FIX_CODE_BLOCKS: Lazy<Regex> =
+        Lazy::new(|| Regex::new(r##"<code([^>]+)class="([^"]+)"([^>]*)>"##).unwrap());
 
     FIX_CODE_BLOCKS
         .replace_all(html, |caps: &Captures<'_>| {
@@ -849,10 +846,9 @@ fn add_playground_pre(
     playground_config: &Playground,
     edition: Option<RustEdition>,
 ) -> String {
-    lazy_static! {
-        static ref ADD_PLAYGROUND_PRE: Regex =
-            Regex::new(r##"((?s)<code[^>]?class="([^"]+)".*?>(.*?)</code>)"##).unwrap();
-    }
+    static ADD_PLAYGROUND_PRE: Lazy<Regex> =
+        Lazy::new(|| Regex::new(r##"((?s)<code[^>]?class="([^"]+)".*?>(.*?)</code>)"##).unwrap());
+
     ADD_PLAYGROUND_PRE
         .replace_all(html, |caps: &Captures<'_>| {
             let text = &caps[1];
@@ -915,9 +911,7 @@ fn add_playground_pre(
 }
 
 fn hide_lines(content: &str) -> String {
-    lazy_static! {
-        static ref BORING_LINES_REGEX: Regex = Regex::new(r"^(\s*)#(.?)(.*)$").unwrap();
-    }
+    static BORING_LINES_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^(\s*)#(.?)(.*)$").unwrap());
 
     let mut result = String::with_capacity(content.len());
     let mut lines = content.lines().peekable();
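As an aside (not part of the diff): `BORING_LINES_REGEX` backs mdBook's convention that lines starting with `#` inside Rust code blocks are compiled but hidden from the rendered page. A small standalone check of what the pattern matches, assuming the `regex` crate:

```rust
// Demo of the "boring line" pattern used by hide_lines above.
use regex::Regex;

fn main() {
    let boring = Regex::new(r"^(\s*)#(.?)(.*)$").unwrap();

    assert!(boring.is_match("# fn main() {"));         // hidden scaffolding line
    assert!(boring.is_match("  #     let x = 1;"));    // leading whitespace is fine
    assert!(!boring.is_match("let visible = true;"));  // ordinary line, shown as-is
}
```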
@@ -148,15 +148,12 @@ fn render(
 
     trace!("Render template");
 
-    _h.template()
-        .ok_or_else(|| RenderError::new("Error with the handlebars template"))
-        .and_then(|t| {
+    let t = _h
+        .template()
+        .ok_or_else(|| RenderError::new("Error with the handlebars template"))?;
     let local_ctx = Context::wraps(&context)?;
     let mut local_rc = rc.clone();
     t.render(r, &local_ctx, &mut local_rc, out)
-        })?;
-
-    Ok(())
 }
 
 pub fn previous(
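A standalone sketch of the control-flow change above (illustrative names; not the actual helper): turning the `Option` into an error with `ok_or_else` and `?` lets the remaining statements sit at the top level instead of inside an `.and_then(...)` closure.

```rust
// Illustration only: `.and_then(closure)` versus `?` early return.
#[derive(Debug)]
struct RenderError(String);

fn render_old(template: Option<&str>) -> Result<String, RenderError> {
    template
        .ok_or_else(|| RenderError("missing template".into()))
        .and_then(|t| Ok(format!("rendered: {t}")))
}

fn render_new(template: Option<&str>) -> Result<String, RenderError> {
    let t = template.ok_or_else(|| RenderError("missing template".into()))?;
    Ok(format!("rendered: {t}"))
}

fn main() {
    assert_eq!(render_old(Some("toc")).unwrap(), render_new(Some("toc")).unwrap());
    assert!(render_new(None).is_err());
}
```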
@@ -117,35 +117,35 @@ impl HelperDef for RenderToc {
                 }
 
                 // Link
-                let path_exists = if let Some(path) =
-                    item.get("path")
-                        .and_then(|p| if p.is_empty() { None } else { Some(p) })
-                {
-                    out.write("<a href=\"")?;
-
-                    let tmp = Path::new(item.get("path").expect("Error: path should be Some(_)"))
-                        .with_extension("html")
-                        .to_str()
-                        .unwrap()
-                        // Hack for windows who tends to use `\` as separator instead of `/`
-                        .replace('\\', "/");
-
-                    // Add link
-                    out.write(&utils::fs::path_to_root(&current_path))?;
-                    out.write(&tmp)?;
-                    out.write("\"")?;
-
-                    if path == &current_path || is_first_chapter {
-                        is_first_chapter = false;
-                        out.write(" class=\"active\"")?;
-                    }
-
-                    out.write(">")?;
-                    true
-                } else {
-                    out.write("<div>")?;
-                    false
-                };
+                let path_exists: bool;
+                match item.get("path") {
+                    Some(path) if !path.is_empty() => {
+                        out.write("<a href=\"")?;
+                        let tmp = Path::new(path)
+                            .with_extension("html")
+                            .to_str()
+                            .unwrap()
+                            // Hack for windows who tends to use `\` as separator instead of `/`
+                            .replace('\\', "/");
+
+                        // Add link
+                        out.write(&utils::fs::path_to_root(&current_path))?;
+                        out.write(&tmp)?;
+                        out.write("\"")?;
+
+                        if path == &current_path || is_first_chapter {
+                            is_first_chapter = false;
+                            out.write(" class=\"active\"")?;
+                        }
+
+                        out.write(">")?;
+                        path_exists = true;
+                    }
+                    _ => {
+                        out.write("<div>")?;
+                        path_exists = false;
+                    }
+                }
 
                 if !self.no_section_label {
                     // Section does not necessarily exist
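A small standalone sketch of the pattern adopted above (values are made up): `path_exists` is declared without a value and assigned exactly once in every `match` arm; the compiler's definite-initialisation check makes this safe, and it reads more directly than the old `let ... = if let ... { .. } else { .. }` expression.

```rust
// Illustration only: deferred initialisation across match arms.
fn main() {
    let item: Option<&str> = Some("chapter_1.md");

    let path_exists: bool;
    match item {
        Some(path) if !path.is_empty() => {
            println!("<a href=\"{path}\">");
            path_exists = true;
        }
        _ => {
            println!("<div>");
            path_exists = false;
        }
    }

    println!("path_exists = {path_exists}");
}
```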
@@ -3,6 +3,7 @@ use std::collections::{HashMap, HashSet};
 use std::path::Path;
 
 use elasticlunr::{Index, IndexBuilder};
+use once_cell::sync::Lazy;
 use pulldown_cmark::*;
 
 use crate::book::{Book, BookItem};
@@ -10,7 +11,6 @@ use crate::config::Search;
 use crate::errors::*;
 use crate::theme::searcher;
 use crate::utils;
-use lazy_static::lazy_static;
 use log::{debug, warn};
 use serde::Serialize;
 
@@ -267,21 +267,19 @@ fn write_to_json(index: Index, search_config: &Search, doc_urls: Vec<String>) ->
 }
 
 fn clean_html(html: &str) -> String {
-    lazy_static! {
-        static ref AMMONIA: ammonia::Builder<'static> = {
-            let mut clean_content = HashSet::new();
-            clean_content.insert("script");
-            clean_content.insert("style");
-            let mut builder = ammonia::Builder::new();
-            builder
-                .tags(HashSet::new())
-                .tag_attributes(HashMap::new())
-                .generic_attributes(HashSet::new())
-                .link_rel(None)
-                .allowed_classes(HashMap::new())
-                .clean_content_tags(clean_content);
-            builder
-        };
-    }
+    static AMMONIA: Lazy<ammonia::Builder<'static>> = Lazy::new(|| {
+        let mut clean_content = HashSet::new();
+        clean_content.insert("script");
+        clean_content.insert("style");
+        let mut builder = ammonia::Builder::new();
+        builder
+            .tags(HashSet::new())
+            .tag_attributes(HashMap::new())
+            .generic_attributes(HashSet::new())
+            .link_rel(None)
+            .allowed_classes(HashMap::new())
+            .clean_content_tags(clean_content);
+        builder
+    });
     AMMONIA.clean(html).to_string()
 }
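The `AMMONIA` change shows that the same `Lazy` pattern also covers multi-statement initialisers, not just one-line `Regex::new` calls. A reduced standalone sketch (the `STOP_WORDS` value is invented for illustration):

```rust
// Sketch of Lazy with a block-style initialiser, mirroring the AMMONIA builder above.
use once_cell::sync::Lazy;
use std::collections::HashSet;

static STOP_WORDS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
    let mut set = HashSet::new();
    set.insert("the");
    set.insert("and");
    set // the fully-built value becomes the static's contents
});

fn main() {
    assert!(STOP_WORDS.contains("the"));
    assert!(!STOP_WORDS.contains("ammonia"));
}
```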
@@ -4,8 +4,8 @@ pub mod fs;
 mod string;
 pub(crate) mod toml_ext;
 use crate::errors::Error;
-use lazy_static::lazy_static;
 use log::error;
+use once_cell::sync::Lazy;
 use pulldown_cmark::{html, CodeBlockKind, CowStr, Event, Options, Parser, Tag};
 use regex::Regex;
 
@@ -21,9 +21,7 @@ pub use self::string::{
 
 /// Replaces multiple consecutive whitespace characters with a single space character.
 pub fn collapse_whitespace(text: &str) -> Cow<'_, str> {
-    lazy_static! {
-        static ref RE: Regex = Regex::new(r"\s\s+").unwrap();
-    }
+    static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\s\s+").unwrap());
     RE.replace_all(text, " ")
 }
 
@@ -52,9 +50,7 @@ pub fn id_from_content(content: &str) -> String {
     let mut content = content.to_string();
 
     // Skip any tags or html-encoded stuff
-    lazy_static! {
-        static ref HTML: Regex = Regex::new(r"(<.*?>)").unwrap();
-    }
+    static HTML: Lazy<Regex> = Lazy::new(|| Regex::new(r"(<.*?>)").unwrap());
     content = HTML.replace_all(&content, "").into();
     const REPL_SUB: &[&str] = &["&lt;", "&gt;", "&amp;", "&#39;", "&quot;"];
     for sub in REPL_SUB {
@@ -97,10 +93,9 @@ pub fn unique_id_from_content(content: &str, id_counter: &mut HashMap<String, us
 /// None. Ideally, print page links would link to anchors on the print page,
 /// but that is very difficult.
 fn adjust_links<'a>(event: Event<'a>, path: Option<&Path>, abs_url: Option<&String>) -> Event<'a> {
-    lazy_static! {
-        static ref SCHEME_LINK: Regex = Regex::new(r"^[a-z][a-z0-9+.-]*:").unwrap();
-        static ref MD_LINK: Regex = Regex::new(r"(?P<link>.*)\.md(?P<anchor>#.*)?").unwrap();
-    }
+    static SCHEME_LINK: Lazy<Regex> = Lazy::new(|| Regex::new(r"^[a-z][a-z0-9+.-]*:").unwrap());
+    static MD_LINK: Lazy<Regex> =
+        Lazy::new(|| Regex::new(r"(?P<link>.*)\.md(?P<anchor>#.*)?").unwrap());
 
     fn fix<'a>(dest: CowStr<'a>, path: Option<&Path>, abs_url: Option<&String>) -> CowStr<'a> {
         if dest.starts_with('#') {
|
||||||
// There are dozens of HTML tags/attributes that contain paths, so
|
// There are dozens of HTML tags/attributes that contain paths, so
|
||||||
// feel free to add more tags if desired; these are the only ones I
|
// feel free to add more tags if desired; these are the only ones I
|
||||||
// care about right now.
|
// care about right now.
|
||||||
lazy_static! {
|
static HTML_LINK: Lazy<Regex> =
|
||||||
static ref HTML_LINK: Regex =
|
Lazy::new(|| Regex::new(r#"(<(?:a|img) [^>]*?(?:src|href)=")([^"]+?)""#).unwrap());
|
||||||
Regex::new(r#"(<(?:a|img) [^>]*?(?:src|href)=")([^"]+?)""#).unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
HTML_LINK
|
HTML_LINK
|
||||||
.replace_all(&html, |caps: ®ex::Captures<'_>| {
|
.replace_all(&html, |caps: ®ex::Captures<'_>| {
|
||||||
|
|
|
@@ -1,4 +1,4 @@
-use lazy_static::lazy_static;
+use once_cell::sync::Lazy;
 use regex::Regex;
 use std::ops::Bound::{Excluded, Included, Unbounded};
 use std::ops::RangeBounds;
@@ -24,10 +24,10 @@ pub fn take_lines<R: RangeBounds<usize>>(s: &str, range: R) -> String {
     }
 }
 
-lazy_static! {
-    static ref ANCHOR_START: Regex = Regex::new(r"ANCHOR:\s*(?P<anchor_name>[\w_-]+)").unwrap();
-    static ref ANCHOR_END: Regex = Regex::new(r"ANCHOR_END:\s*(?P<anchor_name>[\w_-]+)").unwrap();
-}
+static ANCHOR_START: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"ANCHOR:\s*(?P<anchor_name>[\w_-]+)").unwrap());
+static ANCHOR_END: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"ANCHOR_END:\s*(?P<anchor_name>[\w_-]+)").unwrap());
 
 /// Take anchored lines from a string.
 /// Lines containing anchor are ignored.
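For context (commentary, not part of the diff): `ANCHOR_START` and `ANCHOR_END` power mdBook's partial-include feature, where `{{#include some_file.rs:anchor_name}}` pulls in only the region between matching markers. A short illustration of the markers in an included file (the `init` name is made up):

```rust
// Example of the markers these regexes detect in an included source file.
// ANCHOR: init
fn init() {
    println!("only this region would be included");
}
// ANCHOR_END: init

fn main() {
    init();
}
```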