This repository has been archived by the owner on Dec 28, 2022. It is now read-only.

feat: add mark support (#128)
* feat: add mark support
* feat: highlight style
ldez committed May 20, 2016
1 parent 41cd546 commit ee0f301
Showing 5 changed files with 309 additions and 0 deletions.
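
For context, the syntax this commit adds highlighting for: AsciiDoc marks a phrase with single '#' delimiters (constrained, word-bounded) or double '##' delimiters (unconstrained, usable inside a word), and either form may be prefixed with an attribute list such as a role. A minimal sketch using the same tokenizeLine API the new spec relies on; the scope names are the ones this commit defines, and the input strings are illustrative:

  grammar = atom.grammars.grammarForScopeName 'source.asciidoc'

  # Constrained mark: single '#', word-bounded -> content scoped markup.highlight.asciidoc
  {tokens} = grammar.tokenizeLine 'this is #mark# text'

  # Unconstrained mark: double '##', works mid-word -> also markup.highlight.asciidoc
  {tokens} = grammar.tokenizeLine 'this is##mark##text'

  # With an attribute list in front, the content is scoped markup.mark.asciidoc instead
  {tokens} = grammar.tokenizeLine '[role]#mark#'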
56 changes: 56 additions & 0 deletions grammars/language-asciidoc.cson
@@ -154,6 +154,9 @@ repository:
{
include: "#subscript"
}
{
include: "#mark"
}
{
include: "#general-block-macro"
}
@@ -702,6 +705,59 @@ repository:
match: "\\p{Word}[\\p{Word}.%+-]*(@)\\p{Alnum}[\\p{Alnum}.-]*(\\.)\\p{Alpha}{2,4}\\b"
}
]
mark:
patterns: [
{
name: "markup.mark.unconstrained.asciidoc"
match: "(?<!\\\\\\\\)(\\[[^\\]]+?\\])((##)(.+?)(##))"
captures:
"1":
name: "markup.meta.attribute-list.asciidoc"
"2":
name: "markup.mark.asciidoc"
"3":
name: "punctuation.definition.asciidoc"
"5":
name: "punctuation.definition.asciidoc"
}
{
name: "markup.mark.unconstrained.asciidoc"
match: "(?<!\\\\\\\\)((##)(.+?)(##))"
captures:
"1":
name: "markup.highlight.asciidoc"
"2":
name: "punctuation.definition.asciidoc"
"4":
name: "punctuation.definition.asciidoc"
}
{
name: "markup.mark.constrained.asciidoc"
match: "(?<![\\\\;:\\p{Word}#])(\\[[^\\]]+?\\])((#)(\\S|\\S.*?\\S)(#)(?!\\p{Word}))"
captures:
"1":
name: "markup.meta.attribute-list.asciidoc"
"2":
name: "markup.mark.asciidoc"
"3":
name: "punctuation.definition.asciidoc"
"5":
name: "punctuation.definition.asciidoc"
}
{
name: "markup.mark.constrained.asciidoc"
match: "(?<![\\\\;:\\p{Word}#])(\\[[^\\]]+?\\])?((#)(\\S|\\S.*?\\S)(#)(?!\\p{Word}))"
captures:
"1":
name: "markup.meta.attribute-list.asciidoc"
"2":
name: "markup.highlight.asciidoc"
"3":
name: "punctuation.definition.asciidoc"
"5":
name: "punctuation.definition.asciidoc"
}
]
"menu-macro":
patterns: [
{
2 changes: 2 additions & 0 deletions grammars/repositories/asciidoc-grammar.cson
@@ -102,6 +102,8 @@ repository:
include: '#superscript'
,
include: '#subscript'
,
include: '#mark'
,
include: '#general-block-macro'
,
59 changes: 59 additions & 0 deletions grammars/repositories/inlines/mark-grammar.cson
@@ -0,0 +1,59 @@
key: 'mark'

patterns: [

# Matches unconstrained mark phrases prefixed by an attribute list (role)
#
# Examples:
#
# m[role]##ark## phrase
#
name: 'markup.mark.unconstrained.asciidoc'
match: '(?<!\\\\\\\\)(\\[[^\\]]+?\\])((##)(.+?)(##))'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
2: name: 'markup.mark.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
,
# Matches unconstrained mark phrases without an attribute list (highlight)
#
# Examples:
#
# m##ark## phrase
#
name: 'markup.mark.unconstrained.asciidoc'
match: '(?<!\\\\\\\\)((##)(.+?)(##))'
captures:
1: name: 'markup.highlight.asciidoc'
2: name: 'punctuation.definition.asciidoc'
4: name: 'punctuation.definition.asciidoc'
,
# Matches constrained mark phrases prefixed by an attribute list (role)
#
# Examples:
#
# [small]#mark phrase#
#
name: 'markup.mark.constrained.asciidoc'
match: '(?<![\\\\;:\\p{Word}#])(\\[[^\\]]+?\\])((#)(\\S|\\S.*?\\S)(#)(?!\\p{Word}))'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
2: name: 'markup.mark.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
,
# Matches constrained mark phrases (highlight), attribute list optional
#
# Examples:
#
# #mark phrase#
#
name: 'markup.mark.constrained.asciidoc'
match: '(?<![\\\\;:\\p{Word}#])(\\[[^\\]]+?\\])?((#)(\\S|\\S.*?\\S)(#)(?!\\p{Word}))'
captures:
1: name: 'markup.meta.attribute-list.asciidoc'
2: name: 'markup.highlight.asciidoc'
3: name: 'punctuation.definition.asciidoc'
5: name: 'punctuation.definition.asciidoc'
]
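
To make the captures: numbering above easier to follow, here is a rough sketch of how the groups line up in the role-prefixed unconstrained pattern; the role-prefixed constrained pattern is analogous, with single '#' delimiters. The second pattern has no attribute-list group at all, so its groups shift down by one (hence it names captures 1, 2 and 4), while the fourth pattern keeps the attribute-list group but makes it optional.

  #   [role]##marked text##
  #   \____/\_____________/
  #      1          2
  #         ^^\_________/^^
  #         3      4      5
  #
  #   1    -> markup.meta.attribute-list.asciidoc
  #   2    -> markup.mark.asciidoc (markup.highlight.asciidoc in the variants without a role)
  #   3, 5 -> punctuation.definition.asciidoc
  #   4    -> no capture name of its own; it inherits the pattern scope and the scope of group 2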
181 changes: 181 additions & 0 deletions spec/inlines/mark-grammar-spec.coffee
@@ -0,0 +1,181 @@
describe 'mark text', ->
grammar = null

beforeEach ->
waitsForPromise ->
atom.packages.activatePackage 'language-asciidoc'

runs ->
grammar = atom.grammars.grammarForScopeName 'source.asciidoc'

it 'parses the grammar', ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe 'source.asciidoc'

describe 'Should tokenize constrained mark text', ->

it 'when constrained mark text', ->
{tokens} = grammar.tokenizeLine 'this is #mark# text'
expect(tokens).toHaveLength 5
expect(tokens[0]).toEqualJson value: 'this is ', scopes: ['source.asciidoc']
expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[4]).toEqualJson value: ' text', scopes: ['source.asciidoc']

it 'when constrained mark at the beginning of the line', ->
{tokens} = grammar.tokenizeLine '#mark text# from the start.'
expect(tokens).toHaveLength 4
expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[1]).toEqualJson value: 'mark text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[3]).toEqualJson value: ' from the start.', scopes: ['source.asciidoc']

it 'when constrained mark is escaped', ->
{tokens} = grammar.tokenizeLine '\\#mark text#'
expect(tokens).toHaveLength 1
expect(tokens[0]).toEqualJson value: '\\#mark text#', scopes: ['source.asciidoc']

it 'when constrained mark in a bulleted list', ->
{tokens} = grammar.tokenizeLine '* #mark text# followed by normal text'
expect(tokens).toHaveLength 6
expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc']
expect(tokens[1]).toEqualJson value: ' ', scopes: ['source.asciidoc']
expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[3]).toEqualJson value: 'mark text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[5]).toEqualJson value: ' followed by normal text', scopes: ['source.asciidoc']

it 'when constrained mark text within special characters', ->
{tokens} = grammar.tokenizeLine 'a#non-mark#a, !#mark#?, \'#mark#:, .#mark#; ,#mark#'
expect(tokens).toHaveLength 16
expect(tokens[0]).toEqualJson value: 'a#non-mark#a, !', scopes: ['source.asciidoc']
expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[4]).toEqualJson value: '?, \'', scopes: ['source.asciidoc']
expect(tokens[5]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[6]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[7]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[8]).toEqualJson value: ':, .', scopes: ['source.asciidoc']
expect(tokens[9]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[10]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[11]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[12]).toEqualJson value: '; ,', scopes: ['source.asciidoc']
expect(tokens[13]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[14]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[15]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']

it 'when text is "this is \\#mark\\# text"', ->
{tokens} = grammar.tokenizeLine 'this is #mark# text'
expect(tokens).toHaveLength 5
expect(tokens[0]).toEqualJson value: 'this is ', scopes: ['source.asciidoc']
expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[4]).toEqualJson value: ' text', scopes: ['source.asciidoc']

it 'when text is "* text\\#"', ->
{tokens} = grammar.tokenizeLine '* text#'
expect(tokens).toHaveLength 2
expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc']
expect(tokens[1]).toEqualJson value: ' text#', scopes: ['source.asciidoc']

it 'when text is "\\#mark text\\#"', ->
{tokens} = grammar.tokenizeLine '#mark text#'
expect(tokens).toHaveLength 3
expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[1]).toEqualJson value: 'mark text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']

it 'when text is "\\#mark\\#text\\#"', ->
{tokens} = grammar.tokenizeLine '#mark#text#'
expect(tokens).toHaveLength 3
expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[1]).toEqualJson value: 'mark#text', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']

it 'when text is "\\#mark\\# text \\#mark\\# text"', ->
{tokens} = grammar.tokenizeLine '#mark# text #mark# text'
expect(tokens).toHaveLength 8
expect(tokens[0]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[1]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[3]).toEqualJson value: ' text ', scopes: ['source.asciidoc']
expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[5]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[6]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[7]).toEqualJson value: ' text', scopes: ['source.asciidoc']

it 'when text is "* \\#mark\\# text" (list context)', ->
{tokens} = grammar.tokenizeLine '* #mark# text'
expect(tokens).toHaveLength 6
expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc']
expect(tokens[1]).toEqualJson value: ' ', scopes: ['source.asciidoc']
expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[3]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[5]).toEqualJson value: ' text', scopes: ['source.asciidoc']

it 'when text is "* \\#mark\\#" (list context)', ->
{tokens} = grammar.tokenizeLine '* #mark#'
expect(tokens).toHaveLength 5
expect(tokens[0]).toEqualJson value: '*', scopes: ['source.asciidoc', 'markup.list.asciidoc', 'markup.list.bullet.asciidoc']
expect(tokens[1]).toEqualJson value: ' ', scopes: ['source.asciidoc']
expect(tokens[2]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[3]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[4]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']

it 'when having a [role] set on constrained mark text', ->
{tokens} = grammar.tokenizeLine '[role]#mark#'
expect(tokens).toHaveLength 4
expect(tokens[0]).toEqualJson value: '[role]', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.meta.attribute-list.asciidoc']
expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc']
expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']

it 'when having [role1 role2] set on constrained mark text', ->
{tokens} = grammar.tokenizeLine '[role1 role2]#mark#'
expect(tokens).toHaveLength 4
expect(tokens[0]).toEqualJson value: '[role1 role2]', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.meta.attribute-list.asciidoc']
expect(tokens[1]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc']
expect(tokens[3]).toEqualJson value: '#', scopes: ['source.asciidoc', 'markup.mark.constrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']

describe 'Should tokenize unconstrained mark text', ->

it 'when unconstrained mark text', ->
{tokens} = grammar.tokenizeLine 'this is##mark##text'
expect(tokens[0]).toEqualJson value: 'this is', scopes: ['source.asciidoc']
expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[4]).toEqualJson value: 'text', scopes: ['source.asciidoc']

it 'when unconstrained mark text with an embedded hash', ->
{tokens} = grammar.tokenizeLine 'this is##mark#text##'
expect(tokens[0]).toEqualJson value: 'this is', scopes: ['source.asciidoc']
expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark#text', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc']
expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.highlight.asciidoc', 'punctuation.definition.asciidoc']

it 'when unconstrained mark is double escaped', ->
{tokens} = grammar.tokenizeLine '\\\\##mark text##'
expect(tokens).toHaveLength 1
expect(tokens[0]).toEqualJson value: '\\\\##mark text##', scopes: ['source.asciidoc']

it 'when having a [role] set on unconstrained mark text', ->
{tokens} = grammar.tokenizeLine '[role]##mark##'
expect(tokens).toHaveLength 4
expect(tokens[0]).toEqualJson value: '[role]', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.meta.attribute-list.asciidoc']
expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc']
expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']

it 'when having [role1 role2] set on unconstrained mark text', ->
{tokens} = grammar.tokenizeLine '[role1 role2]##mark##'
expect(tokens).toHaveLength 4
expect(tokens[0]).toEqualJson value: '[role1 role2]', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.meta.attribute-list.asciidoc']
expect(tokens[1]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']
expect(tokens[2]).toEqualJson value: 'mark', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc']
expect(tokens[3]).toEqualJson value: '##', scopes: ['source.asciidoc', 'markup.mark.unconstrained.asciidoc', 'markup.mark.asciidoc', 'punctuation.definition.asciidoc']
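
For local verification, the new spec runs with the standard Atom package tooling; below is a minimal sketch of that workflow plus a one-off check written in the same style as the spec above (apm is stock Atom tooling, not part of this commit; the role names are illustrative):

  # From the package root, the usual way to run these specs:
  #   apm test
  #
  # A one-off check in the same style as the spec above:
  grammar = atom.grammars.grammarForScopeName 'source.asciidoc'
  {tokens} = grammar.tokenizeLine '[role1 role2]##mark##'
  console.log tokens.map (t) -> "#{t.value} -> #{t.scopes.join ' '}"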
(One additional changed file was not loaded in this view.)
