From ee0f301e9ebd62daa728488734cc95dd79f2e935 Mon Sep 17 00:00:00 2001 From: Ludovic Fernandez Date: Fri, 20 May 2016 21:01:55 +0200 Subject: [PATCH] feat: add mark support (#128) * feat: add mark support * feat: highlight style --- grammars/language-asciidoc.cson | 56 ++++++ grammars/repositories/asciidoc-grammar.cson | 2 + .../repositories/inlines/mark-grammar.cson | 59 ++++++ spec/inlines/mark-grammar-spec.coffee | 181 ++++++++++++++++++ styles/asciidoc.atom-text-editor.less | 11 ++ 5 files changed, 309 insertions(+) create mode 100644 grammars/repositories/inlines/mark-grammar.cson create mode 100644 spec/inlines/mark-grammar-spec.coffee diff --git a/grammars/language-asciidoc.cson b/grammars/language-asciidoc.cson index 679977b..3b86851 100644 --- a/grammars/language-asciidoc.cson +++ b/grammars/language-asciidoc.cson @@ -154,6 +154,9 @@ repository: { include: "#subscript" } + { + include: "#mark" + } { include: "#general-block-macro" } @@ -702,6 +705,59 @@ repository: match: "\\p{Word}[\\p{Word}.%+-]*(@)\\p{Alnum}[\\p{Alnum}.-]*(\\.)\\p{Alpha}{2,4}\\b" } ] + mark: + patterns: [ + { + name: "markup.mark.unconstrained.asciidoc" + match: "(? 
+ grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage 'language-asciidoc' + + runs -> + grammar = atom.grammars.grammarForScopeName 'source.asciidoc' + + it 'parses the grammar', -> + expect(grammar).toBeDefined() + expect(grammar.scopeName).toBe 'source.asciidoc' + + describe 'Should tokenize constrained mark text', -> + + it 'when constrained mark text', -> + {tokens} = grammar.tokenizeLine 'this is #mark# text' + expect(tokens).toHaveLength 5 + expect(tokens[0]).toEqualJson value: 'this is ', scopes: ['source.asciidoc'] + expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[4]).toEqualJson value: ' text', scopes: ['source.asciidoc'] + + it 'when constrained mark at the beginning of the line', -> + {tokens} = grammar.tokenizeLine '#mark text# from the start.' 
+ expect(tokens).toHaveLength 4 + expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[1]).toEqualJson value: 'mark text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[3]).toEqualJson value: ' from the start.', scopes: ['source.asciidoc'] + + it 'when constrained mark is escaped', -> + {tokens} = grammar.tokenizeLine '\\#mark text#' + expect(tokens).toHaveLength 1 + expect(tokens[0]).toEqualJson value: '\\#mark text#', scopes: ['source.asciidoc'] + + it 'when constrained mark in a *bulleted list', -> + {tokens} = grammar.tokenizeLine '* #mark text# followed by normal text' + expect(tokens).toHaveLength 6 + expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc'] + expect(tokens[1]).toEqualJson value: ' ', scopes: ['source.asciidoc'] + expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[3]).toEqualJson value: 'mark text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[5]).toEqualJson value: ' followed by normal text', scopes: ['source.asciidoc'] + + it 'when constrained mark text within special characters', -> + {tokens} = grammar.tokenizeLine 'a#non-mark#a, !#mark#?, \'#mark#:, .#mark#; ,#mark#' + expect(tokens).toHaveLength 16 + expect(tokens[0]).toEqualJson value: 
'a#non-mark#a, !', scopes: ['source.asciidoc'] + expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[4]).toEqualJson value: '?, \'', scopes: ['source.asciidoc'] + expect(tokens[5]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[6]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[7]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[8]).toEqualJson value: ':, .', scopes: ['source.asciidoc'] + expect(tokens[9]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[10]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[11]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[12]).toEqualJson value: '; ,', scopes: ['source.asciidoc'] + expect(tokens[13]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[14]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 
'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[15]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + + it 'when text is "this is \\#mark\\# text"', -> + {tokens} = grammar.tokenizeLine 'this is #mark# text' + expect(tokens).toHaveLength 5 + expect(tokens[0]).toEqualJson value: 'this is ', scopes: ['source.asciidoc'] + expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[4]).toEqualJson value: ' text', scopes: ['source.asciidoc'] + + it 'when text is "* text\\#"', -> + {tokens} = grammar.tokenizeLine '* text#' + expect(tokens).toHaveLength 2 + expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc'] + expect(tokens[1]).toEqualJson value: ' text#', scopes: ['source.asciidoc'] + + it 'when text is "\\#mark text\\#"', -> + {tokens} = grammar.tokenizeLine '#mark text#' + expect(tokens).toHaveLength 3 + expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[1]).toEqualJson value: 'mark text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + + it 'when text is "\\#mark\\#text\\#"', -> + 
{tokens} = grammar.tokenizeLine '#mark#text#' + expect(tokens).toHaveLength 3 + expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[1]).toEqualJson value: 'mark#text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + + it 'when text is "\\#mark\\# text \\#mark\\# text"', -> + {tokens} = grammar.tokenizeLine '#mark# text #mark# text' + expect(tokens).toHaveLength 8 + expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[1]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[3]).toEqualJson value: ' text ', scopes: ['source.asciidoc'] + expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[5]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[6]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[7]).toEqualJson value: ' text', scopes: ['source.asciidoc'] + + it 'when text is "* \\#mark\\# text" (list context)', -> + {tokens} = grammar.tokenizeLine '* #mark# text' + expect(tokens).toHaveLength 6 + 
expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc'] + expect(tokens[1]).toEqualJson value: ' ', scopes: ['source.asciidoc'] + expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[3]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[5]).toEqualJson value: ' text', scopes: ['source.asciidoc'] + + it 'when text is "* \\#mark\\#" (list context)', -> + {tokens} = grammar.tokenizeLine '* #mark#' + expect(tokens).toHaveLength 5 + expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc'] + expect(tokens[1]).toEqualJson value: ' ', scopes: ['source.asciidoc'] + expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[3]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + + it 'when having a [role] set on constrained mark text', -> + {tokens} = grammar.tokenizeLine '[role]#mark#' + expect(tokens).toHaveLength 4 + expect(tokens[0]).toEqualJson value: '[role]', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.meta.attribute-list.asciidoc'] + expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 
'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc'] + expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] + + it 'when having [role1 role2] set on constrained mark text', -> + {tokens} = grammar.tokenizeLine '[role1 role2]#mark#' + expect(tokens).toHaveLength 4 + expect(tokens[0]).toEqualJson value: '[role1 role2]', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.meta.attribute-list.asciidoc'] + expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc'] + expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] + + describe 'Should tokenize unconstrained mark text', -> + + it 'when unconstrained mark text', -> + {tokens} = grammar.tokenizeLine 'this is##mark##text' + expect(tokens[0]).toEqualJson value: 'this is', scopes: ['source.asciidoc'] + expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[4]).toEqualJson value: 'text', scopes: ['source.asciidoc'] + + it 'when unconstrained mark text with 
hashes', -> + {tokens} = grammar.tokenizeLine 'this is##mark#text##' + expect(tokens[0]).toEqualJson value: 'this is', scopes: ['source.asciidoc'] + expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark#text', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc'] + expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc'] + + it 'when unconstrained mark is double escaped', -> + {tokens} = grammar.tokenizeLine '\\\\##mark text##' + expect(tokens).toHaveLength 1 + expect(tokens[0]).toEqualJson value: '\\\\##mark text##', scopes: ['source.asciidoc'] + + it 'when having a [role] set on unconstrained mark text', -> + {tokens} = grammar.tokenizeLine '[role]##mark##' + expect(tokens).toHaveLength 4 + expect(tokens[0]).toEqualJson value: '[role]', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.meta.attribute-list.asciidoc'] + expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc'] + expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] + + it 'when having [role1 role2] set on unconstrained mark text', -> + {tokens} = grammar.tokenizeLine '[role1 role2]##mark##' + expect(tokens).toHaveLength 4 + expect(tokens[0]).toEqualJson value: '[role1 role2]', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.meta.attribute-list.asciidoc'] + 
expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] + expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc'] + expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc'] diff --git a/styles/asciidoc.atom-text-editor.less b/styles/asciidoc.atom-text-editor.less index 21adbf9..e44b008 100644 --- a/styles/asciidoc.atom-text-editor.less +++ b/styles/asciidoc.atom-text-editor.less @@ -1,6 +1,8 @@ @import 'syntax-variables'; @syntax-text-color-unobtrusive: fadeout(@syntax-text-color, 50%); +@syntax-text-mark: mix(@syntax-color-constant, @syntax-color-keyword, 50%); +@syntax-text-highlight: mix(yellow, @syntax-background-color, 70%); atom-text-editor::shadow, :host { .asciidoc { @@ -15,6 +17,15 @@ atom-text-editor::shadow, :host { font-style: italic; } + &.mark { + color: @syntax-text-mark; + } + + &.highlight { + color: black; + background-color: @syntax-text-highlight; + } + &.character-reference { font-style: italic; color: mix(red, @syntax-text-color, 20%);