From 3c5da7abb80c077366e69148355c1d75676c8284 Mon Sep 17 00:00:00 2001 From: MrShwhale Date: Sat, 30 Mar 2024 14:25:03 -0700 Subject: [PATCH 1/8] Add documentation for tokenizer step. --- src/parsing/token/mod.rs | 13 +++++++++++-- src/tokenizer.rs | 1 + src/utf16.rs | 2 +- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/parsing/token/mod.rs b/src/parsing/token/mod.rs index 9c9d05e88..e480f9aa1 100644 --- a/src/parsing/token/mod.rs +++ b/src/parsing/token/mod.rs @@ -38,6 +38,7 @@ use pest::Parser; use std::ops::Range; use strum_macros::IntoStaticStr; +/// Struct that represents a token in a specific text. #[derive(Serialize, Debug, Clone, PartialEq, Eq)] pub struct ExtractedToken<'a> { pub token: Token, @@ -46,6 +47,8 @@ pub struct ExtractedToken<'a> { } impl<'a> ExtractedToken<'a> { + /// Returns a new object with the same values, except with span referring to the byte indices + /// of the text if it were in UTF-16 rather than in UTF-8. #[must_use] pub fn to_utf16_indices(&self, map: &Utf16IndexMap) -> Self { // Copy fields @@ -61,6 +64,8 @@ impl<'a> ExtractedToken<'a> { } } +/// Enum that represents the type of a parsed token. For a struct with additional context +/// surrounding the positioning and content of the token, see [`ExtractedToken`]. #[derive( Serialize, Deserialize, Enum, IntoStaticStr, Debug, Copy, Clone, PartialEq, Eq, )] @@ -163,6 +168,10 @@ pub enum Token { } impl Token { + /// Extracts all tokens from the given text. + /// # Errors + /// Returns an error if something goes wrong with the parsing process. This will result in the + /// only Token being a raw text containing all of the input. pub(crate) fn extract_all(text: &str) -> Vec { info!("Running lexer on input"); @@ -196,7 +205,7 @@ impl Token { } } - /// Converts a single `Pair` from pest into its corresponding `ExtractedToken`. + /// Converts a single [`Pair`] from pest into its corresponding [`ExtractedToken`]. 
fn convert_pair(pair: Pair) -> ExtractedToken { // Extract values from the Pair let rule = pair.as_rule(); @@ -212,7 +221,7 @@ impl Token { ExtractedToken { token, slice, span } } - /// Mapping of a pest `Rule` to its corresponding `Token` enum. + /// Maps each pest [`Rule`] to its corresponding [`Token`]. fn get_from_rule(rule: Rule) -> Token { match rule { // Symbols diff --git a/src/tokenizer.rs b/src/tokenizer.rs index b720745da..c5de78d36 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -27,6 +27,7 @@ pub struct Tokenization<'t> { full_text: FullText<'t>, } +/// Struct that represents both a list of tokens and the text the tokens were generated from impl<'t> Tokenization<'t> { #[inline] pub fn tokens<'r>(&'r self) -> &'r [ExtractedToken<'t>] { diff --git a/src/utf16.rs b/src/utf16.rs index b3a02e087..85903c4d3 100644 --- a/src/utf16.rs +++ b/src/utf16.rs @@ -41,7 +41,7 @@ pub struct Utf16IndexMap<'t> { impl<'t> Utf16IndexMap<'t> { /// Produces a mapping of UTF-8 byte index to UTF-16 index. /// - /// This enables objects to be converted into using character indices + /// This enables objects to be converted from UTF-8 into UTF-16 using character indices /// for strings rather than byte indices. This is useful for environments /// which do use UTF-16 strings, such as Javascript (via WebASM). pub fn new(text: &'t str) -> Self { From 4c3fef57bcf351cfba3cdd2ddc9805115cb9e3dd Mon Sep 17 00:00:00 2001 From: MrShwhale Date: Sat, 30 Mar 2024 14:42:27 -0700 Subject: [PATCH 2/8] Change placement of tokenizer docline. --- src/tokenizer.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tokenizer.rs b/src/tokenizer.rs index c5de78d36..31467466b 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -21,13 +21,13 @@ use crate::parsing::{ExtractedToken, Token}; use crate::text::FullText; +/// Struct that represents both a list of tokens and the text the tokens were generated from. 
#[derive(Debug, Clone)] pub struct Tokenization<'t> { tokens: Vec>, full_text: FullText<'t>, } -/// Struct that represents both a list of tokens and the text the tokens were generated from impl<'t> Tokenization<'t> { #[inline] pub fn tokens<'r>(&'r self) -> &'r [ExtractedToken<'t>] { From 83fdd906a43873183c33ad99a16a8b8df8fd7156 Mon Sep 17 00:00:00 2001 From: MrShwhale Date: Thu, 4 Apr 2024 13:30:44 -0700 Subject: [PATCH 3/8] Update parsing documentation and fix clippy warnings. Clippy warnings were fixed by testing after removing the unused imports. On a failure, the import would be readded with an allow. --- src/data/page_ref.rs | 2 ++ src/parsing/collect/mod.rs | 1 - src/parsing/mod.rs | 7 ++--- src/parsing/paragraph/stack.rs | 2 ++ src/parsing/parser.rs | 3 +- src/parsing/rule/impls/block/blocks/later.rs | 2 +- src/parsing/rule/impls/block/blocks/mod.rs | 4 +-- src/parsing/rule/impls/mod.rs | 5 ++- src/parsing/rule/mod.rs | 2 +- src/render/html/element/mod.rs | 2 +- src/render/mod.rs | 1 + src/settings/interwiki.rs | 32 ++++++++++++++++++++ src/settings/mod.rs | 1 + src/tree/attribute/safe.rs | 2 +- src/tree/element/object.rs | 3 +- src/tree/partial.rs | 2 +- 16 files changed, 53 insertions(+), 18 deletions(-) diff --git a/src/data/page_ref.rs b/src/data/page_ref.rs index 29ad84241..5d7cef3ff 100644 --- a/src/data/page_ref.rs +++ b/src/data/page_ref.rs @@ -40,6 +40,7 @@ pub struct PageRef<'t> { } impl<'t> PageRef<'t> { + /// Creates a [`PageRef`] with the given page and site. #[inline] pub fn page_and_site(site: S1, page: S2) -> Self where @@ -52,6 +53,7 @@ impl<'t> PageRef<'t> { } } + /// Creates a [`PageRef`] with the given page and no site. 
#[inline] pub fn page_only(page: S) -> Self where diff --git a/src/parsing/collect/mod.rs b/src/parsing/collect/mod.rs index 84a42d8a3..96018cf4f 100644 --- a/src/parsing/collect/mod.rs +++ b/src/parsing/collect/mod.rs @@ -35,7 +35,6 @@ mod prelude { pub use crate::parsing::prelude::*; pub use crate::parsing::rule::Rule; pub use crate::parsing::token::{ExtractedToken, Token}; - pub use crate::text::FullText; } mod consume; diff --git a/src/parsing/mod.rs b/src/parsing/mod.rs index 20f54e7c3..3720544dc 100644 --- a/src/parsing/mod.rs +++ b/src/parsing/mod.rs @@ -45,7 +45,7 @@ mod prelude { }; pub use crate::settings::WikitextSettings; pub use crate::text::FullText; - pub use crate::tree::{Element, Elements, OwnedElementsIterator}; + pub use crate::tree::{Element, Elements}; } use self::depth::{process_depths, DepthItem, DepthList}; @@ -74,7 +74,7 @@ pub use self::token::{ExtractedToken, Token}; /// Parse through the given tokens and produce an AST. /// -/// This takes a list of `ExtractedToken` items produced by `tokenize()`. +/// This takes a list of [`ExtractedToken`] items produced by [tokenize](crate::tokenizer::tokenize()). pub fn parse<'r, 't>( tokenization: &'r Tokenization<'t>, page_info: &'r PageInfo<'t>, @@ -243,8 +243,7 @@ impl NextIndex for Incrementer { } } -// Parse internal result - +/// Represents the result of an internal parse. #[derive(Serialize, Deserialize, Debug, Clone)] pub struct UnstructuredParseResult<'r, 't> { /// The returned result from parsing. 
diff --git a/src/parsing/paragraph/stack.rs b/src/parsing/paragraph/stack.rs index 1d829071f..ab837503c 100644 --- a/src/parsing/paragraph/stack.rs +++ b/src/parsing/paragraph/stack.rs @@ -91,6 +91,7 @@ impl<'t> ParagraphStack<'t> { } } + /// Creates a paragraph element out of this struct pub fn build_paragraph(&mut self) -> Option> { debug!( "Building paragraph from current stack state (length {})", @@ -111,6 +112,7 @@ impl<'t> ParagraphStack<'t> { Some(element) } + /// Set the finished field in this struct to the paragraph element pub fn end_paragraph(&mut self) { debug!("Ending the current paragraph to push as a completed element"); diff --git a/src/parsing/parser.rs b/src/parsing/parser.rs index 59b5b34a6..04b661dbf 100644 --- a/src/parsing/parser.rs +++ b/src/parsing/parser.rs @@ -32,6 +32,7 @@ use std::{mem, ptr}; const MAX_RECURSION_DEPTH: usize = 100; +/// Parser for a set of tokens. #[derive(Debug, Clone)] pub struct Parser<'r, 't> { // Page and parse information @@ -204,7 +205,7 @@ impl<'r, 't> Parser<'r, 't> { } } - // Table of Contents + /// Add heading element to table of contents pub fn push_table_of_contents_entry( &mut self, heading: HeadingLevel, diff --git a/src/parsing/rule/impls/block/blocks/later.rs b/src/parsing/rule/impls/block/blocks/later.rs index 3b15af120..43e21323e 100644 --- a/src/parsing/rule/impls/block/blocks/later.rs +++ b/src/parsing/rule/impls/block/blocks/later.rs @@ -25,7 +25,7 @@ //! (not to be confused with `MiniRecentThreads`) which only //! outputted "later." and no other functionality. //! -//! See https://twitter.com/wikidotbugs/status/1328588862218702850 +//! 
See use super::prelude::*; diff --git a/src/parsing/rule/impls/block/blocks/mod.rs b/src/parsing/rule/impls/block/blocks/mod.rs index d278eec68..7865c168e 100644 --- a/src/parsing/rule/impls/block/blocks/mod.rs +++ b/src/parsing/rule/impls/block/blocks/mod.rs @@ -20,11 +20,9 @@ mod prelude { pub use super::super::{Arguments, BlockRule}; - pub use crate::parsing::collect::*; - pub use crate::parsing::condition::ParseCondition; pub use crate::parsing::parser::Parser; pub use crate::parsing::prelude::*; - pub use crate::parsing::{ParseError, Token}; + pub use crate::parsing::ParseError; pub use crate::tree::{Container, ContainerType, Element}; #[cfg(debug)] diff --git a/src/parsing/rule/impls/mod.rs b/src/parsing/rule/impls/mod.rs index 0439257dc..fd354408b 100644 --- a/src/parsing/rule/impls/mod.rs +++ b/src/parsing/rule/impls/mod.rs @@ -25,11 +25,10 @@ mod prelude { pub use crate::parsing::consume::consume; pub use crate::parsing::error::{ParseError, ParseErrorKind}; pub use crate::parsing::parser::Parser; - pub use crate::parsing::result::{ParseResult, ParseSuccess}; + pub use crate::parsing::result::ParseResult; pub use crate::parsing::rule::{LineRequirement, Rule}; pub use crate::parsing::token::{ExtractedToken, Token}; - pub use crate::text::FullText; - pub use crate::tree::{AttributeMap, Container, ContainerType, Element, Elements}; + pub use crate::tree::{AttributeMap, ContainerType, Element, Elements}; } mod anchor; diff --git a/src/parsing/rule/mod.rs b/src/parsing/rule/mod.rs index a7d81ef3d..c644449af 100644 --- a/src/parsing/rule/mod.rs +++ b/src/parsing/rule/mod.rs @@ -26,7 +26,7 @@ mod mapping; pub mod impls; -pub use self::mapping::{get_rules_for_token, RULE_MAP}; +pub use self::mapping::get_rules_for_token; /// Defines a rule that can possibly match tokens and return an `Element`. 
#[derive(Copy, Clone)] diff --git a/src/render/html/element/mod.rs b/src/render/html/element/mod.rs index 57a0ea37d..e9369d77c 100644 --- a/src/render/html/element/mod.rs +++ b/src/render/html/element/mod.rs @@ -46,7 +46,7 @@ mod prelude { pub use super::super::context::HtmlContext; pub use super::super::random::Random; pub use super::{render_element, render_elements}; - pub use crate::tree::{Element, SyntaxTree}; + pub use crate::tree::Element; } use self::bibliography::{render_bibcite, render_bibliography}; diff --git a/src/render/mod.rs b/src/render/mod.rs index b44e27813..8af85bc3a 100644 --- a/src/render/mod.rs +++ b/src/render/mod.rs @@ -18,6 +18,7 @@ * along with this program. If not, see . */ +#[allow(unused_imports)] mod prelude { pub use super::Render; pub use crate::data::PageInfo; diff --git a/src/settings/interwiki.rs b/src/settings/interwiki.rs index 80248685a..663c7c835 100644 --- a/src/settings/interwiki.rs +++ b/src/settings/interwiki.rs @@ -22,9 +22,23 @@ use once_cell::sync::Lazy; use std::borrow::Cow; use std::collections::HashMap; +/// An [`InterwikiSettings`] instance that has no prefixes. pub static EMPTY_INTERWIKI: Lazy = Lazy::new(|| InterwikiSettings { prefixes: hashmap! {}, }); + +/// An [`InterwikiSettings`] instance that has the default prefixes. +/// +/// These prefixes are: +/// - wikipedia:path => https://wikipedia.org/wiki/path +/// - wp:path => https://wikipedia.org/wiki/path +/// - commons:path => https://commons.wikimedia.org/wiki/path +/// - google:path => https://google.com/search?q=path +/// - duckduckgo:path => https://duckduckgo.com/?q=path +/// - ddg:path => https://duckduckgo.com/?q=path +/// - dictionary:path => https://dictionary.com/browse/path +/// - thesaurus:path => https://thesaurus.com/browse/path +#[allow(rustdoc::bare_urls)] pub static DEFAULT_INTERWIKI: Lazy = Lazy::new(|| InterwikiSettings { prefixes: hashmap! 
{ cow!("wikipedia") => cow!("https://wikipedia.org/wiki/$$"), @@ -38,18 +52,35 @@ pub static DEFAULT_INTERWIKI: Lazy = Lazy::new(|| InterwikiSe }, }); +/// Settings that determine how to turn [`interwiki links`](http://org.wikidot.com/doc:wiki-syntax#toc21) +/// into full URLs. #[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, Eq)] pub struct InterwikiSettings { #[serde(flatten)] + /// A map from each interwiki prefix to the interwiki URL. A '$$' in the URL indicates where the path specified in + /// the Wikijump interwiki block should go. pub prefixes: HashMap, Cow<'static, str>>, } impl InterwikiSettings { + /// Creates a new instance with no prefixes. #[inline] pub fn new() -> Self { InterwikiSettings::default() } + /// Creates a full URL from an interwiki link. + /// # Example + /// ``` + /// # use ftml::settings::*; + /// assert_eq!(DEFAULT_INTERWIKI.build("wikipedia:Mallard").unwrap(), "https://wikipedia.org/wiki/Mallard"); + /// ``` + /// # Errors + /// Returns None if: + /// - The link starts with a colon + /// - There is no colon in the link + /// - There is nothing after the colon + /// - The interwiki prefix is not found pub fn build(&self, link: &str) -> Option { match link.find(':') { // Starting with a colon is not interwiki, skip. 
@@ -145,4 +176,5 @@ fn interwiki_prefixes() { check!("thesaurus:oak", Some("https://thesaurus.com/browse/oak")); check!("banana:fruit-salad", None); check!(":empty", None); + check!("no-link:", None); } diff --git a/src/settings/mod.rs b/src/settings/mod.rs index 3b9eb008e..98ab3314c 100644 --- a/src/settings/mod.rs +++ b/src/settings/mod.rs @@ -93,6 +93,7 @@ pub struct WikitextSettings { } impl WikitextSettings { + /// Returns the default settings for the given WikitextMode pub fn from_mode(mode: WikitextMode) -> Self { let interwiki = DEFAULT_INTERWIKI.clone(); diff --git a/src/tree/attribute/safe.rs b/src/tree/attribute/safe.rs index 4bd77faca..1a3d2accf 100644 --- a/src/tree/attribute/safe.rs +++ b/src/tree/attribute/safe.rs @@ -39,7 +39,7 @@ macro_rules! hashset_unicase { /// List of safe attributes. All others will be filtered out. /// -/// See https://scuttle.atlassian.net/wiki/spaces/WD/pages/1030782977/Allowed+Attributes+in+Wikitext +/// See pub static SAFE_ATTRIBUTES: Lazy>> = Lazy::new(|| { hashset_unicase![ "accept", diff --git a/src/tree/element/object.rs b/src/tree/element/object.rs index 5abe8b6fd..b83515f0c 100644 --- a/src/tree/element/object.rs +++ b/src/tree/element/object.rs @@ -29,6 +29,7 @@ use ref_map::*; use std::borrow::Cow; use std::num::NonZeroU32; +/// Represents an element to be rendered. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "kebab-case", tag = "element", content = "data")] pub enum Element<'t> { @@ -367,7 +368,7 @@ impl Element<'_> { /// This is to avoid making the call very expensive, but for a complete /// understanding of the paragraph requirements, see the `Elements` return. 
/// - /// See https://developer.mozilla.org/en-US/docs/Web/Guide/HTML/Content_categories#phrasing_content + /// See pub fn paragraph_safe(&self) -> bool { match self { Element::Container(container) => container.ctype().paragraph_safe(), diff --git a/src/tree/partial.rs b/src/tree/partial.rs index b4b62535d..1c0a04f0c 100644 --- a/src/tree/partial.rs +++ b/src/tree/partial.rs @@ -42,7 +42,7 @@ pub enum PartialElement<'t> { /// Text associated with a Ruby annotation. /// - /// Outputs HTML ``. See also https://developer.mozilla.org/en-US/docs/Web/HTML/Element/ruby. + /// Outputs HTML ``. See also . RubyText(RubyText<'t>), } From b863dbce36256939acb43640819289897a55effc Mon Sep 17 00:00:00 2001 From: MrShwhale Date: Thu, 4 Apr 2024 14:05:17 -0700 Subject: [PATCH 4/8] Correct docs on build_paragraph. --- src/parsing/paragraph/stack.rs | 6 +++--- src/parsing/parser.rs | 2 +- src/settings/interwiki.rs | 2 +- src/settings/mod.rs | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/parsing/paragraph/stack.rs b/src/parsing/paragraph/stack.rs index ab837503c..06b5928a6 100644 --- a/src/parsing/paragraph/stack.rs +++ b/src/parsing/paragraph/stack.rs @@ -27,7 +27,7 @@ pub struct ParagraphStack<'t> { /// Elements being accumulated in the current paragraph. current: Vec>, - /// Previous elements created, to be outputted in the final `SyntaxTree`. + /// Previous elements created, to be outputted in the final [`SyntaxTree`]. finished: Vec>, /// Gathered errors from paragraph parsing. @@ -91,7 +91,7 @@ impl<'t> ParagraphStack<'t> { } } - /// Creates a paragraph element out of this struct + /// Creates a paragraph element out of this instance's current elements. 
pub fn build_paragraph(&mut self) -> Option> { debug!( "Building paragraph from current stack state (length {})", @@ -112,7 +112,7 @@ impl<'t> ParagraphStack<'t> { Some(element) } - /// Set the finished field in this struct to the paragraph element + /// Set the finished field in this struct to the paragraph element. pub fn end_paragraph(&mut self) { debug!("Ending the current paragraph to push as a completed element"); diff --git a/src/parsing/parser.rs b/src/parsing/parser.rs index 04b661dbf..1427ba8b4 100644 --- a/src/parsing/parser.rs +++ b/src/parsing/parser.rs @@ -205,7 +205,7 @@ impl<'r, 't> Parser<'r, 't> { } } - /// Add heading element to table of contents + /// Add heading element to table of contents. pub fn push_table_of_contents_entry( &mut self, heading: HeadingLevel, diff --git a/src/settings/interwiki.rs b/src/settings/interwiki.rs index 663c7c835..139e88889 100644 --- a/src/settings/interwiki.rs +++ b/src/settings/interwiki.rs @@ -27,6 +27,7 @@ pub static EMPTY_INTERWIKI: Lazy = Lazy::new(|| InterwikiSett prefixes: hashmap! {}, }); +#[allow(rustdoc::bare_urls)] /// An [`InterwikiSettings`] instance that has the default prefixes. /// /// These prefixes are: @@ -38,7 +39,6 @@ pub static EMPTY_INTERWIKI: Lazy = Lazy::new(|| InterwikiSett /// - ddg:path => https://duckduckgo.com/?q=path /// - dictionary:path => https://dictionary.com/browse/path /// - thesaurus:path => https://thesaurus.com/browse/path -#[allow(rustdoc::bare_urls)] pub static DEFAULT_INTERWIKI: Lazy = Lazy::new(|| InterwikiSettings { prefixes: hashmap! { cow!("wikipedia") => cow!("https://wikipedia.org/wiki/$$"), diff --git a/src/settings/mod.rs b/src/settings/mod.rs index 98ab3314c..478eb2b41 100644 --- a/src/settings/mod.rs +++ b/src/settings/mod.rs @@ -93,7 +93,7 @@ pub struct WikitextSettings { } impl WikitextSettings { - /// Returns the default settings for the given WikitextMode + /// Returns the default settings for the given WikitextMode. 
pub fn from_mode(mode: WikitextMode) -> Self { let interwiki = DEFAULT_INTERWIKI.clone(); From 4a9f2eab9c870d4b3a9c1566ddac52e335e3c0f6 Mon Sep 17 00:00:00 2001 From: William Patmore <93882520+MrShwhale@users.noreply.github.com> Date: Thu, 4 Apr 2024 17:54:19 -0700 Subject: [PATCH 5/8] Update src/parsing/token/mod.rs Co-authored-by: emmiegit --- src/parsing/token/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/parsing/token/mod.rs b/src/parsing/token/mod.rs index e480f9aa1..5cdf87a16 100644 --- a/src/parsing/token/mod.rs +++ b/src/parsing/token/mod.rs @@ -171,7 +171,7 @@ impl Token { /// Extracts all tokens from the given text. /// # Errors /// Returns an error if something goes wrong with the parsing process. This will result in the - /// only Token being a raw text containing all of the input. + /// only [`Token`] being a raw text containing all of the input. pub(crate) fn extract_all(text: &str) -> Vec { info!("Running lexer on input"); From c339eb9a69c22d22283102f439a290a2cf4ff016 Mon Sep 17 00:00:00 2001 From: William Patmore <93882520+MrShwhale@users.noreply.github.com> Date: Thu, 4 Apr 2024 17:54:27 -0700 Subject: [PATCH 6/8] Update src/settings/mod.rs Co-authored-by: emmiegit --- src/settings/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/settings/mod.rs b/src/settings/mod.rs index 478eb2b41..a03fa0889 100644 --- a/src/settings/mod.rs +++ b/src/settings/mod.rs @@ -93,7 +93,7 @@ pub struct WikitextSettings { } impl WikitextSettings { - /// Returns the default settings for the given WikitextMode. + /// Returns the default settings for the given [`WikitextMode`]. 
pub fn from_mode(mode: WikitextMode) -> Self { let interwiki = DEFAULT_INTERWIKI.clone(); From 65ac566349931ff4a78cacfc340915b1ddcdb1c6 Mon Sep 17 00:00:00 2001 From: William Patmore <93882520+MrShwhale@users.noreply.github.com> Date: Thu, 4 Apr 2024 17:54:32 -0700 Subject: [PATCH 7/8] Update src/settings/interwiki.rs Co-authored-by: emmiegit --- src/settings/interwiki.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/settings/interwiki.rs b/src/settings/interwiki.rs index 139e88889..5e51eb2c9 100644 --- a/src/settings/interwiki.rs +++ b/src/settings/interwiki.rs @@ -75,7 +75,7 @@ impl InterwikiSettings { /// # use ftml::settings::*; /// assert_eq!(DEFAULT_INTERWIKI.build("wikipedia:Mallard").unwrap(), "https://wikipedia.org/wiki/Mallard"); /// ``` - /// # Errors + /// /// Returns None if: /// - The link starts with a colon /// - There is no colon in the link From 97d71eae4df9987be285f46d683598f1b1c82177 Mon Sep 17 00:00:00 2001 From: William Patmore <93882520+MrShwhale@users.noreply.github.com> Date: Thu, 4 Apr 2024 17:57:38 -0700 Subject: [PATCH 8/8] Update src/settings/interwiki.rs Co-authored-by: emmiegit --- src/settings/interwiki.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/settings/interwiki.rs b/src/settings/interwiki.rs index 5e51eb2c9..0619506ab 100644 --- a/src/settings/interwiki.rs +++ b/src/settings/interwiki.rs @@ -31,14 +31,14 @@ pub static EMPTY_INTERWIKI: Lazy = Lazy::new(|| InterwikiSett /// An [`InterwikiSettings`] instance that has the default prefixes. 
/// /// These prefixes are: -/// - wikipedia:path => https://wikipedia.org/wiki/path -/// - wp:path => https://wikipedia.org/wiki/path -/// - commons:path => https://commons.wikimedia.org/wiki/path -/// - google:path => https://google.com/search?q=path -/// - duckduckgo:path => https://duckduckgo.com/?q=path -/// - ddg:path => https://duckduckgo.com/?q=path -/// - dictionary:path => https://dictionary.com/browse/path -/// - thesaurus:path => https://thesaurus.com/browse/path +/// - `wikipedia:path` => `https://wikipedia.org/wiki/path` +/// - `wp:path` => `https://wikipedia.org/wiki/path` +/// - `commons:path` => `https://commons.wikimedia.org/wiki/path` +/// - `google:path` => `https://google.com/search?q=path` +/// - `duckduckgo:path` => `https://duckduckgo.com/?q=path` +/// - `ddg:path` => `https://duckduckgo.com/?q=path` +/// - `dictionary:path` => `https://dictionary.com/browse/path` +/// - `thesaurus:path` => `https://thesaurus.com/browse/path` pub static DEFAULT_INTERWIKI: Lazy = Lazy::new(|| InterwikiSettings { prefixes: hashmap! { cow!("wikipedia") => cow!("https://wikipedia.org/wiki/$$"),