chore(deps): upgrade vendored dependencies

Author:  Christopher Allen Lane
Date:    2022-07-04 21:51:57 -04:00
Parent:  c482488c41
Commit:  d3250fda79
236 changed files with 19082 additions and 11468 deletions


@ -25,6 +25,17 @@ linters:
- testpackage
- godot
- nestif
- paralleltest
- nlreturn
- cyclop
- exhaustivestruct
- gci
- gofumpt
- errorlint
- exhaustive
- ifshort
- wrapcheck
- stylecheck
linters-settings:
govet:
@ -36,6 +47,11 @@ linters-settings:
goconst:
min-len: 8
min-occurrences: 3
forbidigo:
forbid:
- (Must)?NewLexer
exclude_godoc_examples: false
issues:
max-per-linter: 0


@ -14,11 +14,13 @@ builds:
- darwin
- windows
goarch:
- arm64
- amd64
- "386"
goarm:
- "6"
main: ./cmd/chroma/main.go
dir: ./cmd/chroma
main: .
ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
binary: chroma
archives:


@ -12,7 +12,7 @@ tokentype_string.go: types.go
chromad:
rm -f chromad
(export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)
(export CGOENABLED=0 GOOS=linux GOARCH=amd64; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)
upload: chromad
scp chromad root@swapoff.org: && \


@ -1,4 +1,5 @@
# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/chroma.svg)](https://circleci.com/gh/alecthomas/chroma) [![Go Report Card](https://goreportcard.com/badge/github.com/alecthomas/chroma)](https://goreportcard.com/report/github.com/alecthomas/chroma) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/)
# Chroma — A general purpose syntax highlighter in pure Go
[![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CI](https://github.com/alecthomas/chroma/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/)
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
@ -36,12 +37,12 @@ translators for Pygments lexers and styles.
Prefix | Language
:----: | --------
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, BlitzBasic, BNF, Brainfuck
B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, Brainfuck
C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan
E | EBNF, Elixir, Elm, EmacsLisp, Erlang
F | Factor, Fish, Forth, Fortran, FSharp
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy
I | Idris, Igor, INI, Io
J | J, Java, JavaScript, JSON, Julia, Jungle
@ -49,11 +50,11 @@ K | Kotlin
L | Lighttpd configuration file, LLVM, Lua
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
N | NASM, Newspeak, Nginx configuration file, Nim, Nix
O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python, Python 3
O | Objective-C, OCaml, Octave, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode
P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python 2, Python
Q | QBasic
R | R, Racket, Ragel, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Swift, SYSTEMD, systemverilog
R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Svelte, Swift, SYSTEMD, systemverilog
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
V | VB.net, verilog, VHDL, VimL, vue
W | WDTE
@ -210,10 +211,10 @@ using the included Python 3 script `pygments2chroma.py`. I use something like
the following:
```sh
python3 ~/Projects/chroma/_tools/pygments2chroma.py \
python3 _tools/pygments2chroma.py \
pygments.lexers.jvm.KotlinLexer \
> ~/Projects/chroma/lexers/kotlin.go \
&& gofmt -s -w ~/Projects/chroma/lexers/*.go
> lexers/k/kotlin.go \
&& gofmt -s -w lexers/k/kotlin.go
```
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
@ -249,18 +250,34 @@ For a quick overview of the available styles and how they look, check out the [C
<a id="markdown-command-line-interface" name="command-line-interface"></a>
## Command-line interface
A command-line interface to Chroma is included. It can be installed with:
A command-line interface to Chroma is included.
```sh
go get -u github.com/alecthomas/chroma/cmd/chroma
Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases).
The CLI can be used as a preprocessor to colorise the output of `less(1)`; see the documentation for the `LESSOPEN` environment variable.
The `--fail` flag suppresses output and exits with status 1 when chroma cannot resolve a lexer for the given file, which makes it easy to fall back to another preprocessor. For example:
```shell
export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"'
```
Replace `cat` with your favourite fallback preprocessor.
When invoked as `.lessfilter`, the `--fail` flag is automatically turned on under the hood for easy integration with [the lesspipe shipped with Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
for that setup the `chroma` executable can simply be symlinked to `~/.lessfilter`.
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
## What's missing compared to Pygments?
- Quite a few lexers, for various reasons (pull-requests welcome):
- Pygments lexers for complex languages often include custom code to
handle certain aspects, such as Perl6's ability to nest code inside
handle certain aspects, such as Raku's ability to nest code inside
regular expressions. These require time and effort to convert.
- I mostly only converted languages I had heard of, to reduce the porting cost.
- Some more esoteric features of Pygments are omitted for simplicity.


@ -46,6 +46,13 @@ func WithPreWrapper(wrapper PreWrapper) Option {
}
}
// WrapLongLines wraps long lines.
func WrapLongLines(b bool) Option {
return func(f *Formatter) {
f.wrapLongLines = b
}
}
// WithLineNumbers formats output with line numbers.
func WithLineNumbers(b bool) Option {
return func(f *Formatter) {
@ -131,10 +138,18 @@ var (
}
defaultPreWrapper = preWrapper{
start: func(code bool, styleAttr string) string {
return fmt.Sprintf("<pre%s>", styleAttr)
if code {
return fmt.Sprintf(`<pre tabindex="0"%s><code>`, styleAttr)
}
return fmt.Sprintf(`<pre tabindex="0"%s>`, styleAttr)
},
end: func(code bool) string {
return "</pre>"
if code {
return `</code></pre>`
}
return `</pre>`
},
}
)
@ -147,6 +162,7 @@ type Formatter struct {
allClasses bool
preWrapper PreWrapper
tabWidth int
wrapLongLines bool
lineNumbers bool
lineNumbersInTable bool
linkableLineNumbers bool
@ -197,10 +213,10 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
if wrapInTable {
// List line numbers in its own <td>
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.Background))
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.Background)))
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
for index := range lines {
line := f.baseLineNumber + index
highlight, next := f.shouldHighlight(highlightIndex, line)
@ -222,7 +238,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
}
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.Background)))
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
highlightIndex = 0
for index, tokens := range lines {
@ -232,14 +248,28 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
if next {
highlightIndex++
}
// Start of Line
fmt.Fprint(w, `<span`)
if highlight {
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
// Line + LineHighlight
if f.Classes {
fmt.Fprintf(w, ` class="%s %s"`, f.class(chroma.Line), f.class(chroma.LineHighlight))
} else {
fmt.Fprintf(w, ` style="%s %s"`, css[chroma.Line], css[chroma.LineHighlight])
}
fmt.Fprint(w, `>`)
} else {
fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line))
}
// Line number
if f.lineNumbers && !wrapInTable {
fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
}
fmt.Fprintf(w, `<span%s>`, f.styleAttr(css, chroma.CodeLine))
for _, token := range tokens {
html := html.EscapeString(token.String())
attr := f.styleAttr(css, token.Type)
@ -248,9 +278,10 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
}
fmt.Fprint(w, html)
}
if highlight {
fmt.Fprintf(w, "</span>")
}
fmt.Fprint(w, `</span>`) // End of CodeLine
fmt.Fprint(w, `</span>`) // End of Line
}
fmt.Fprintf(w, f.preWrapper.End(true))
@ -351,7 +382,11 @@ func (f *Formatter) tabWidthStyle() string {
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
css := f.styleToCSS(style)
// Special-case background as it is mapped to the outer ".chroma" class.
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
return err
}
// Special-case PreWrapper as it is the ".chroma" class.
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil {
return err
}
// Special-case code column of table to expand width.
@ -375,7 +410,8 @@ func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
sort.Ints(tts)
for _, ti := range tts {
tt := chroma.TokenType(ti)
if tt == chroma.Background {
switch tt {
case chroma.Background, chroma.PreWrapper:
continue
}
class := f.class(tt)
@ -405,12 +441,21 @@ func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string
classes[t] = StyleEntryToCSS(entry)
}
classes[chroma.Background] += f.tabWidthStyle()
lineNumbersStyle := "margin-right: 0.4em; padding: 0 0.4em 0 0.4em;"
classes[chroma.PreWrapper] += classes[chroma.Background] + `;`
// Make PreWrapper a grid to show highlight style with full width.
if len(f.highlightRanges) > 0 {
classes[chroma.PreWrapper] += `display: grid;`
}
// Make PreWrapper wrap long lines.
if f.wrapLongLines {
classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;`
}
lineNumbersStyle := `white-space: pre; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
// All rules begin with default rules followed by user provided rules
classes[chroma.Line] = `display: flex;` + classes[chroma.Line]
classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable]
classes[chroma.LineHighlight] = "display: block; width: 100%;" + classes[chroma.LineHighlight]
classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0; width: auto; overflow: auto; display: block;" + classes[chroma.LineTable]
classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable]
classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
return classes
}
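
The hunks above add a `WrapLongLines` option and split the special-cased CSS between a new `.bg` rule (`chroma.Background`) and the existing `.chroma` rule (now keyed on `chroma.PreWrapper`). Below is a minimal sketch of how a caller might exercise the new option against this vendored copy; the input string and style name are placeholders, not part of the diff:

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	src := "package main\n\nfunc main() { println(\"hi\") }\n" // placeholder input

	lexer := lexers.Get("go")
	style := styles.Get("monokai")

	// WrapLongLines is the option added above; WithClasses makes WriteCSS relevant.
	formatter := html.New(html.WithClasses(true), html.WrapLongLines(true))

	// Emits the new ".bg" (Background) rule alongside the ".chroma" (PreWrapper) rule.
	if err := formatter.WriteCSS(os.Stdout, style); err != nil {
		panic(err)
	}

	it, err := lexer.Tokenise(nil, src)
	if err != nil {
		panic(err)
	}
	if err := formatter.Format(os.Stdout, style, it); err != nil {
		panic(err)
	}
}
```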


@ -120,7 +120,7 @@ func maxLineWidth(lines [][]chroma.Token) int {
for _, tokens := range lines {
length := 0
for _, token := range tokens {
length += len(strings.Replace(token.String(), ` `, " ", -1))
length += len(strings.ReplaceAll(token.String(), ` `, " "))
}
if length > maxWidth {
maxWidth = length
@ -136,7 +136,7 @@ func (f *Formatter) writeTokenBackgrounds(w io.Writer, lines [][]chroma.Token, s
for index, tokens := range lines {
lineLength := 0
for _, token := range tokens {
length := len(strings.Replace(token.String(), ` `, " ", -1))
length := len(strings.ReplaceAll(token.String(), ` `, " "))
tokenBackground := style.Get(token.Type).Background
if tokenBackground.IsSet() && tokenBackground != style.Get(chroma.Background).Background {
fmt.Fprintf(w, "<rect id=\"%s\" x=\"%dch\" y=\"%fem\" width=\"%dch\" height=\"1.2em\" fill=\"%s\" />\n", escapeString(token.String()), lineLength, 1.2*float64(index)+0.25, length, style.Get(token.Type).Background.String())


@ -3,16 +3,7 @@ module github.com/alecthomas/chroma
go 1.13
require (
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect
github.com/alecthomas/kong v0.2.4
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dlclark/regexp2 v1.4.0
github.com/mattn/go-colorable v0.1.6
github.com/mattn/go-isatty v0.0.12
github.com/pkg/errors v0.9.1 // indirect
github.com/sergi/go-diff v1.0.0 // indirect
github.com/stretchr/testify v1.3.0 // indirect
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
github.com/stretchr/testify v1.7.0
)


@ -1,38 +1,14 @@
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
github.com/alecthomas/kong v0.2.4 h1:Y0ZBCHAvHhTHw7FFJ2FzCAAG4pkbTgA45nc7BpMhDNk=
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=


@ -2,6 +2,7 @@ package chroma
import (
"fmt"
"strings"
)
var (
@ -98,9 +99,11 @@ type Lexer interface {
// Lexers is a slice of lexers sortable by name.
type Lexers []Lexer
func (l Lexers) Len() int { return len(l) }
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l Lexers) Less(i, j int) bool { return l[i].Config().Name < l[j].Config().Name }
func (l Lexers) Len() int { return len(l) }
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l Lexers) Less(i, j int) bool {
return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name)
}
// PrioritisedLexers is a slice of lexers sortable by priority.
type PrioritisedLexers []Lexer
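
A small sketch of the behaviour change above: sorting a `Lexers` slice now orders by lower-cased name, so for example `markdown` sorts before `Mason` instead of after it. The `m.Markdown` and `m.Mason` variables are assumed from the vendored per-letter lexer packages:

```go
package main

import (
	"fmt"
	"sort"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers/m"
)

func main() {
	ls := chroma.Lexers{m.Mason, m.Markdown}
	sort.Sort(ls)
	for _, l := range ls {
		// Byte order used to put "Mason" first; the case-insensitive
		// comparison now yields "markdown", then "Mason".
		fmt.Println(l.Config().Name)
	}
}
```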


@ -3,6 +3,9 @@
The tests in this directory feed a known input `testdata/<name>.actual` into the parser for `<name>` and check
that its output matches `<name>.exported`.
It is also possible to perform several tests on the same parser `<name>` by placing known inputs `*.actual` into a
directory `testdata/<name>/`.
## Running the tests
Run the tests as normal:


@ -30,13 +30,12 @@ func alRules() Rules {
{`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil},
{`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil},
{`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil},
// Added new objects types of BC 2021 wave 1 (REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension)
{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension))\b`, Keyword, nil},
{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b`, Keyword, nil},
{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil},
{`\b([<>]=|<>|<|>)\b?`, Operator, nil},
{`\b(\-|\+|\/|\*)\b`, Operator, nil},
{`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil},
{`\b(?i:(ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
{`\b(?i:(ADD|ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
{`\s*[(\.\.)&\|]\s*`, Operator, nil},
{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil},
{`[;:,]`, Punctuation, nil},


@ -26,10 +26,10 @@ func aplRules() Rules {
{`[⋄◇()]`, Punctuation, nil},
{`[\[\];]`, LiteralStringRegex, nil},
{`⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameFunction, nil},
{`[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil},
{`[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil},
{`¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?`, LiteralNumber, nil},
{`[\.\\/⌿⍀¨⍣⍨⍠⍤∘]`, NameAttribute, nil},
{`[+\-×÷⌈⌊∣|?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]`, Operator, nil},
{`[\.\\/⌿⍀¨⍣⍨⍠⍤∘⍥@⌺⌶⍢]`, NameAttribute, nil},
{`[+\-×÷⌈⌊∣|?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⍸]`, Operator, nil},
{``, NameConstant, nil},
{`[⎕⍞]`, NameVariableGlobal, nil},
{`[←→]`, KeywordDeclaration, nil},

vendor/github.com/alecthomas/chroma/lexers/a/armasm.go (new file, generated, vendored, 72 lines)

@ -0,0 +1,72 @@
package a
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
var ArmAsm = internal.Register(MustNewLazyLexer(
&Config{
Name: "ArmAsm",
Aliases: []string{"armasm"},
EnsureNL: true,
Filenames: []string{"*.s", "*.S"},
MimeTypes: []string{"text/x-armasm", "text/x-asm"},
},
armasmRules,
))
func armasmRules() Rules {
return Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`[@;].*?\n`, CommentSingle, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
},
"literal": {
// Binary
{`0b[01]+`, NumberBin, Pop(1)},
// Hex
{`0x\w{1,8}`, NumberHex, Pop(1)},
// Octal
{`0\d+`, NumberOct, Pop(1)},
// Float
{`\d+?\.\d+?`, NumberFloat, Pop(1)},
// Integer
{`\d+`, NumberInteger, Pop(1)},
// String
{`(")(.+)(")`, ByGroups(Punctuation, StringDouble, Punctuation), Pop(1)},
// Char
{`(')(.{1}|\\.{1})(')`, ByGroups(Punctuation, StringChar, Punctuation), Pop(1)},
},
"opcode": {
// Escape at line end
{`\n`, Text, Pop(1)},
// Comment
{`(@|;).*\n`, CommentSingle, Pop(1)},
// Whitespace
{`(\s+|,)`, Text, nil},
// Register by number
{`[rapcfxwbhsdqv]\d{1,2}`, NameClass, nil},
// Address by hex
{`=0x\w+`, ByGroups(Text, NameLabel), nil},
// Pseudo address by label
{`(=)(\w+)`, ByGroups(Text, NameLabel), nil},
// Immediate
{`#`, Text, Push("literal")},
},
"root": {
Include("commentsandwhitespace"),
// Directive with optional param
{`(\.\w+)([ \t]+\w+\s+?)?`, ByGroups(KeywordNamespace, NameLabel), nil},
// Label with data
{`(\w+)(:)(\s+\.\w+\s+)`, ByGroups(NameLabel, Punctuation, KeywordNamespace), Push("literal")},
// Label
{`(\w+)(:)`, ByGroups(NameLabel, Punctuation), nil},
// Syscall Op
{`svc\s+\w+`, NameNamespace, nil},
// Opcode
{`[a-zA-Z]+`, Text, Push("opcode")},
},
}
}


@ -0,0 +1,27 @@
package b
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// BashSession lexer.
var BashSession = internal.Register(MustNewLazyLexer(
&Config{
Name: "BashSession",
Aliases: []string{"bash-session", "console", "shell-session"},
Filenames: []string{".sh-session"},
MimeTypes: []string{"text/x-sh"},
EnsureNL: true,
},
bashsessionRules,
))
func bashsessionRules() Rules {
return Rules{
"root": {
{`^((?:\[[^]]+@[^]]+\]\s?)?[#$%>])(\s*)(.*\n?)`, ByGroups(GenericPrompt, Text, Using(Bash)), nil},
{`^.+\n?`, GenericOutput, nil},
},
}
}

vendor/github.com/alecthomas/chroma/lexers/b/bicep.go (new file, generated, vendored, 112 lines)

@ -0,0 +1,112 @@
package b
import (
"strings"
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Bicep lexer.
var Bicep = internal.Register(MustNewLazyLexer(
&Config{
Name: "Bicep",
Aliases: []string{"bicep"},
Filenames: []string{"*.bicep"},
},
bicepRules,
))
func bicepRules() Rules {
bicepFunctions := []string{
"any",
"array",
"concat",
"contains",
"empty",
"first",
"intersection",
"items",
"last",
"length",
"min",
"max",
"range",
"skip",
"take",
"union",
"dateTimeAdd",
"utcNow",
"deployment",
"environment",
"loadFileAsBase64",
"loadTextContent",
"int",
"json",
"extensionResourceId",
"getSecret",
"list",
"listKeys",
"listKeyValue",
"listAccountSas",
"listSecrets",
"pickZones",
"reference",
"resourceId",
"subscriptionResourceId",
"tenantResourceId",
"managementGroup",
"resourceGroup",
"subscription",
"tenant",
"base64",
"base64ToJson",
"base64ToString",
"dataUri",
"dataUriToString",
"endsWith",
"format",
"guid",
"indexOf",
"lastIndexOf",
"length",
"newGuid",
"padLeft",
"replace",
"split",
"startsWith",
"string",
"substring",
"toLower",
"toUpper",
"trim",
"uniqueString",
"uri",
"uriComponent",
"uriComponentToString",
}
return Rules{
"root": {
{`//[^\n\r]+`, CommentSingle, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
{`([']?\w+[']?)(:)`, ByGroups(NameProperty, Punctuation), nil},
{`\b('(resourceGroup|subscription|managementGroup|tenant)')\b`, KeywordNamespace, nil},
{`'[\w\$\{\(\)\}\.]{1,}?'`, LiteralStringInterpol, nil},
{`('''|').*?('''|')`, LiteralString, nil},
{`\b(allowed|batchSize|description|maxLength|maxValue|metadata|minLength|minValue|secure)\b`, NameDecorator, nil},
{`\b(az|sys)\.`, NameNamespace, nil},
{`\b(` + strings.Join(bicepFunctions, "|") + `)\b`, NameFunction, nil},
// https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/bicep-functions-logical
{`\b(bool)(\()`, ByGroups(NameFunction, Punctuation), nil},
{`\b(for|if|in)\b`, Keyword, nil},
{`\b(module|output|param|resource|var)\b`, KeywordDeclaration, nil},
{`\b(array|bool|int|object|string)\b`, KeywordType, nil},
// https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/operators
{`(>=|>|<=|<|==|!=|=~|!~|::|&&|\?\?|!|-|%|\*|\/|\+)`, Operator, nil},
{`[\(\)\[\]\.:\?{}@=]`, Punctuation, nil},
{`[\w_-]+`, Text, nil},
{`\s+`, TextWhitespace, nil},
},
}
}
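
For completeness, a hedged sketch of driving the new lexer through the `quick` helper; the Bicep line is an arbitrary placeholder, and `"bicep"` matches the alias registered in the `Config` above:

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := "param location string = resourceGroup().location" // placeholder Bicep
	// "bicep" resolves to the lexer registered above; the formatter and style
	// names are ordinary chroma built-ins.
	if err := quick.Highlight(os.Stdout, src, "bicep", "terminal256", "monokai"); err != nil {
		panic(err)
	}
}
```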


@ -10,8 +10,8 @@ var C = internal.Register(MustNewLazyLexer(
&Config{
Name: "C",
Aliases: []string{"c"},
Filenames: []string{"*.c", "*.h", "*.idc"},
MimeTypes: []string{"text/x-chdr", "text/x-csrc"},
Filenames: []string{"*.c", "*.h", "*.idc", "*.x[bp]m"},
MimeTypes: []string{"text/x-chdr", "text/x-csrc", "image/x-xbitmap", "image/x-xpixmap"},
EnsureNL: true,
},
cRules,
@ -43,7 +43,7 @@ func cRules() Rules {
{`[~!%^&*+=|?:<>/-]`, Operator, nil},
{`[()\[\],.]`, Punctuation, nil},
{Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil},
{`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil},
{`(bool|int|long|float|short|double|char((8|16|32)_t)?|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil},
{Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil},
{`(__m(128i|128d|128|64))\b`, KeywordReserved, nil},
{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil},


@ -24,7 +24,8 @@ func cSharpRules() Rules {
{`^\s*\[.*?\]`, NameAttribute, nil},
{`[^\S\n]+`, Text, nil},
{`\\\n`, Text, nil},
{`//.*?\n`, CommentSingle, nil},
{`///[^\n\r]+`, CommentSpecial, nil},
{`//[^\n\r]+`, CommentSingle, nil},
{`/[*].*?[*]/`, CommentMultiline, nil},
{`\n`, Text, nil},
{`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil},
@ -34,12 +35,12 @@ func cSharpRules() Rules {
{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
{`'\\.'|'[^\\]'`, LiteralStringChar, nil},
{`0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?`, LiteralNumber, nil},
{`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
{`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma|nullable)\b[^\n\r]+`, CommentPreproc, nil},
{`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil},
{`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|interface|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil},
{`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|init|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil},
{`(global)(::)`, ByGroups(Keyword, Punctuation), nil},
{`(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??`, KeywordType, nil},
{`(class|struct)(\s+)`, ByGroups(Keyword, Text), Push("class")},
{`(class|struct|record|interface)(\s+)`, ByGroups(Keyword, Text), Push("class")},
{`(namespace|using)(\s+)`, ByGroups(Keyword, Text), Push("namespace")},
{`@?[_a-zA-Z]\w*`, Name, nil},
},


@ -52,7 +52,7 @@ func phpCommonRules() Rules {
{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
{`0[0-7]+`, LiteralNumberOct, nil},
{`0x[a-f0-9_]+`, LiteralNumberHex, nil},
{`[\d_]+`, LiteralNumberInteger, nil},
{`\d[\d_]*`, LiteralNumberInteger, nil},
{`0b[01]+`, LiteralNumberBin, nil},
{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},


@ -13,11 +13,12 @@ var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
&Config{
Name: "PHTML",
Aliases: []string{"phtml"},
Filenames: []string{"*.phtml"},
MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"},
Filenames: []string{"*.phtml", "*.php", "*.php[345]", "*.inc"},
MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5", "text/x-php"},
DotAll: true,
CaseInsensitive: true,
EnsureNL: true,
Priority: 2,
},
phtmlRules,
).SetAnalyser(func(text string) float32 {


@ -6,7 +6,7 @@ import (
)
// Dylan lexer.
var Dylan = internal.Register(MustNewLexer(
var Dylan = internal.Register(MustNewLazyLexer(
&Config{
Name: "Dylan",
Aliases: []string{"dylan"},
@ -14,61 +14,63 @@ var Dylan = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-dylan"},
CaseInsensitive: true,
},
Rules{
"root": {
{`\s+`, Whitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{`([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Whitespace, LiteralString), nil},
Default(Push("code")),
},
"code": {
{`\s+`, Whitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("comment")},
{`"`, LiteralString, Push("string")},
{`'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil},
{`#b[01]+`, LiteralNumberBin, nil},
{`#o[0-7]+`, LiteralNumberOct, nil},
{`[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)`, LiteralNumberFloat, nil},
{`[-+]?\d+`, LiteralNumberInteger, nil},
{`#x[0-9a-f]+`, LiteralNumberHex, nil},
func() Rules {
return Rules{
"root": {
{`\s+`, Whitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{`([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Whitespace, LiteralString), nil},
Default(Push("code")),
},
"code": {
{`\s+`, Whitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("comment")},
{`"`, LiteralString, Push("string")},
{`'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil},
{`#b[01]+`, LiteralNumberBin, nil},
{`#o[0-7]+`, LiteralNumberOct, nil},
{`[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)`, LiteralNumberFloat, nil},
{`[-+]?\d+`, LiteralNumberInteger, nil},
{`#x[0-9a-f]+`, LiteralNumberHex, nil},
{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)`,
ByGroups(Operator, NameVariable, Operator, NameBuiltin), nil},
{`(\?)(:)(token|name|variable|expression|body|case-body|\*)`,
ByGroups(Operator, Operator, NameVariable), nil},
{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, NameVariable), nil},
{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)`,
ByGroups(Operator, NameVariable, Operator, NameBuiltin), nil},
{`(\?)(:)(token|name|variable|expression|body|case-body|\*)`,
ByGroups(Operator, Operator, NameVariable), nil},
{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, NameVariable), nil},
{`(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])`, Punctuation, nil},
{`:=`, Operator, nil},
{`#[tf]`, Literal, nil},
{`#"`, LiteralStringSymbol, Push("symbol")},
{`#[a-z0-9-]+`, Keyword, nil},
{`#(all-keys|include|key|next|rest)`, Keyword, nil},
{`[\w!&*<>|^$%@+~?/=-]+:`, KeywordConstant, nil},
{`<[\w!&*<>|^$%@+~?/=-]+>`, NameClass, nil},
{`\*[\w!&*<>|^$%@+~?/=-]+\*`, NameVariableGlobal, nil},
{`\$[\w!&*<>|^$%@+~?/=-]+`, NameConstant, nil},
{`(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(NameBuiltin, Whitespace, NameVariable), nil},
{`(error|signal|return|break)`, NameException, nil},
{`(\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, Name), nil},
},
"comment": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
"symbol": {
{`"`, LiteralStringSymbol, Pop(1)},
{`[^\\"]+`, LiteralStringSymbol, nil},
},
"string": {
{`"`, LiteralString, Pop(1)},
{`\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil},
{`[^\\"\n]+`, LiteralString, nil},
{`\\\n`, LiteralString, nil},
{`\\`, LiteralString, nil},
},
{`(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])`, Punctuation, nil},
{`:=`, Operator, nil},
{`#[tf]`, Literal, nil},
{`#"`, LiteralStringSymbol, Push("symbol")},
{`#[a-z0-9-]+`, Keyword, nil},
{`#(all-keys|include|key|next|rest)`, Keyword, nil},
{`[\w!&*<>|^$%@+~?/=-]+:`, KeywordConstant, nil},
{`<[\w!&*<>|^$%@+~?/=-]+>`, NameClass, nil},
{`\*[\w!&*<>|^$%@+~?/=-]+\*`, NameVariableGlobal, nil},
{`\$[\w!&*<>|^$%@+~?/=-]+`, NameConstant, nil},
{`(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(NameBuiltin, Whitespace, NameVariable), nil},
{`(error|signal|return|break)`, NameException, nil},
{`(\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, Name), nil},
},
"comment": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
"symbol": {
{`"`, LiteralStringSymbol, Pop(1)},
{`[^\\"]+`, LiteralStringSymbol, nil},
},
"string": {
{`"`, LiteralString, Pop(1)},
{`\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil},
{`[^\\"\n]+`, LiteralString, nil},
{`\\\n`, LiteralString, nil},
{`\\`, LiteralString, nil},
},
}
},
))

vendor/github.com/alecthomas/chroma/lexers/f/fennel.go (new file, generated, vendored, 66 lines)

@ -0,0 +1,66 @@
package f
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Fennel lexer.
var Fennel = internal.Register(MustNewLazyLexer(
&Config{
Name: "Fennel",
Aliases: []string{"fennel", "fnl"},
Filenames: []string{"*.fennel"},
MimeTypes: []string{"text/x-fennel", "application/x-fennel"},
},
fennelRules,
))
// Here's some Fennel code used to generate the lists of keywords:
// (local fennel (require :fennel))
//
// (fn member? [t x] (each [_ y (ipairs t)] (when (= y x) (lua "return true"))))
//
// (local declarations [:fn :lambda :λ :local :var :global :macro :macros])
// (local keywords [])
// (local globals [])
//
// (each [name data (pairs (fennel.syntax))]
// (if (member? declarations name) nil ; already populated
// data.special? (table.insert keywords name)
// data.macro? (table.insert keywords name)
// data.global? (table.insert globals name)))
//
// (fn quoted [tbl]
// (table.sort tbl)
// (table.concat (icollect [_ k (ipairs tbl)]
// (string.format "`%s`" k)) ", "))
//
// (print :Keyword (quoted keywords))
// (print :KeywordDeclaration (quoted declarations))
// (print :NameBuiltin (quoted globals))
func fennelRules() Rules {
return Rules{
"root": {
{`;.*$`, CommentSingle, nil},
{`\s+`, Whitespace, nil},
{`-?\d+\.\d+`, LiteralNumberFloat, nil},
{`-?\d+`, LiteralNumberInteger, nil},
{`0x-?[abcdef\d]+`, LiteralNumberHex, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil},
{`\\(.|[a-z]+)`, LiteralStringChar, nil},
{`::?#?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil},
{"~@|[`\\'#^~&@]", Operator, nil},
{Words(``, ` `, `#`, `%`, `*`, `+`, `-`, `->`, `->>`, `-?>`, `-?>>`, `.`, `..`, `/`, `//`, `:`, `<`, `<=`, `=`, `>`, `>=`, `?.`, `^`, `accumulate`, `and`, `band`, `bnot`, `bor`, `bxor`, `collect`, `comment`, `do`, `doc`, `doto`, `each`, `eval-compiler`, `for`, `hashfn`, `icollect`, `if`, `import-macros`, `include`, `length`, `let`, `lshift`, `lua`, `macrodebug`, `match`, `not`, `not=`, `or`, `partial`, `pick-args`, `pick-values`, `quote`, `require-macros`, `rshift`, `set`, `set-forcibly!`, `tset`, `values`, `when`, `while`, `with-open`, `~=`), Keyword, nil},
{Words(``, ` `, `fn`, `global`, `lambda`, `local`, `macro`, `macros`, `var`, `λ`), KeywordDeclaration, nil},
{Words(``, ` `, `_G`, `arg`, `assert`, `bit32`, `bit32.arshift`, `bit32.band`, `bit32.bnot`, `bit32.bor`, `bit32.btest`, `bit32.bxor`, `bit32.extract`, `bit32.lrotate`, `bit32.lshift`, `bit32.replace`, `bit32.rrotate`, `bit32.rshift`, `collectgarbage`, `coroutine`, `coroutine.create`, `coroutine.resume`, `coroutine.running`, `coroutine.status`, `coroutine.wrap`, `coroutine.yield`, `debug`, `debug.debug`, `debug.gethook`, `debug.getinfo`, `debug.getlocal`, `debug.getmetatable`, `debug.getregistry`, `debug.getupvalue`, `debug.getuservalue`, `debug.sethook`, `debug.setlocal`, `debug.setmetatable`, `debug.setupvalue`, `debug.setuservalue`, `debug.traceback`, `debug.upvalueid`, `debug.upvaluejoin`, `dofile`, `error`, `getmetatable`, `io`, `io.close`, `io.flush`, `io.input`, `io.lines`, `io.open`, `io.output`, `io.popen`, `io.read`, `io.tmpfile`, `io.type`, `io.write`, `ipairs`, `load`, `loadfile`, `loadstring`, `math`, `math.abs`, `math.acos`, `math.asin`, `math.atan`, `math.atan2`, `math.ceil`, `math.cos`, `math.cosh`, `math.deg`, `math.exp`, `math.floor`, `math.fmod`, `math.frexp`, `math.ldexp`, `math.log`, `math.log10`, `math.max`, `math.min`, `math.modf`, `math.pow`, `math.rad`, `math.random`, `math.randomseed`, `math.sin`, `math.sinh`, `math.sqrt`, `math.tan`, `math.tanh`, `module`, `next`, `os`, `os.clock`, `os.date`, `os.difftime`, `os.execute`, `os.exit`, `os.getenv`, `os.remove`, `os.rename`, `os.setlocale`, `os.time`, `os.tmpname`, `package`, `package.loadlib`, `package.searchpath`, `package.seeall`, `pairs`, `pcall`, `print`, `rawequal`, `rawget`, `rawlen`, `rawset`, `require`, `select`, `setmetatable`, `string`, `string.byte`, `string.char`, `string.dump`, `string.find`, `string.format`, `string.gmatch`, `string.gsub`, `string.len`, `string.lower`, `string.match`, `string.rep`, `string.reverse`, `string.sub`, `string.upper`, `table`, `table.concat`, `table.insert`, `table.maxn`, `table.pack`, `table.remove`, `table.sort`, `table.unpack`, `tonumber`, `tostring`, `type`, `unpack`, `xpcall`), NameBuiltin, nil},
{`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil},
{`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil},
{`(\[|\])`, Punctuation, nil},
{`(\{|\})`, Punctuation, nil},
{`(\(|\))`, Punctuation, nil},
},
}
}


@ -17,11 +17,32 @@ var Fish = internal.Register(MustNewLazyLexer(
))
func fishRules() Rules {
keywords := []string{
`begin`, `end`, `if`, `else`, `while`, `break`, `for`, `return`, `function`, `block`,
`case`, `continue`, `switch`, `not`, `and`, `or`, `set`, `echo`, `exit`, `pwd`, `true`,
`false`, `cd`, `cdh`, `count`, `test`,
}
keywordsPattern := Words(`\b`, `\b`, keywords...)
builtins := []string{
`alias`, `bg`, `bind`, `breakpoint`, `builtin`, `argparse`, `abbr`, `string`, `command`,
`commandline`, `complete`, `contains`, `dirh`, `dirs`, `disown`, `emit`, `eval`, `exec`,
`fg`, `fish`, `fish_add_path`, `fish_breakpoint_prompt`, `fish_command_not_found`,
`fish_config`, `fish_git_prompt`, `fish_greeting`, `fish_hg_prompt`, `fish_indent`,
`fish_is_root_user`, `fish_key_reader`, `fish_mode_prompt`, `fish_opt`, `fish_pager`,
`fish_prompt`, `fish_right_prompt`, `fish_status_to_signal`, `fish_svn_prompt`,
`fish_title`, `fish_update_completions`, `fish_vcs_prompt`, `fishd`, `funced`,
`funcsave`, `functions`, `help`, `history`, `isatty`, `jobs`, `math`, `mimedb`, `nextd`,
`open`, `prompt_pwd`, `realpath`, `popd`, `prevd`, `psub`, `pushd`, `random`, `read`,
`set_color`, `source`, `status`, `suspend`, `trap`, `type`, `ulimit`, `umask`, `vared`,
`fc`, `getopts`, `hash`, `kill`, `printf`, `time`, `wait`,
}
return Rules{
"root": {
Include("basic"),
Include("data"),
Include("interp"),
Include("data"),
},
"interp": {
{`\$\(\(`, Keyword, Push("math")},
@ -29,13 +50,20 @@ func fishRules() Rules {
{`\$#?(\w+|.)`, NameVariable, nil},
},
"basic": {
{`\b(begin|end|if|else|while|break|for|in|return|function|block|case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|cd|count|test)(\s*)\b`, ByGroups(Keyword, Text), nil},
{`\b(alias|bg|bind|breakpoint|builtin|command|commandline|complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|fish_indent|fish_pager|fish_prompt|fish_right_prompt|fish_update_completions|fishd|funced|funcsave|functions|help|history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|pushd|random|read|set_color|source|status|trap|type|ulimit|umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)`, NameBuiltin, nil},
{Words(`(?<=(?:^|\A|;|&&|\|\||\||`+keywordsPattern+`)\s*)`, `(?=;?\b)`, keywords...), Keyword, nil},
{`(?<=for\s+\S+\s+)in\b`, Keyword, nil},
{Words(`\b`, `\s*\b(?!\.)`, builtins...), NameBuiltin, nil},
{`#!.*\n`, CommentHashbang, nil},
{`#.*\n`, Comment, nil},
{`\\[\w\W]`, LiteralStringEscape, nil},
{`(\b\w+)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), nil},
{`[\[\]()=]`, Operator, nil},
{`[\[\]()={}]`, Operator, nil},
{`(?<=\[[^\]]+)\.\.|-(?=[^\[]+\])`, Operator, nil},
{`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil},
{`(?<=set\s+(?:--?[^\d\W][\w-]*\s+)?)\w+`, NameVariable, nil},
{`(?<=for\s+)\w[\w-]*(?=\s+in)`, NameVariable, nil},
{`(?<=function\s+)\w(?:[^\n])*?(?= *[-\n])`, NameFunction, nil},
{`(?<=(?:^|\b(?:and|or|sudo)\b|;|\|\||&&|\||\(|(?:\b\w+\s*=\S+\s)) *)\w[\w-]*`, NameFunction, nil},
},
"data": {
{`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil},
@ -43,10 +71,11 @@ func fishRules() Rules {
{`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil},
{`(?s)'.*?'`, LiteralStringSingle, nil},
{`;`, Punctuation, nil},
{`&|\||\^|<|>`, Operator, nil},
{`&&|\|\||&|\||\^|<|>`, Operator, nil},
{`\s+`, Text, nil},
{`\d+(?= |\Z)`, LiteralNumber, nil},
{"[^=\\s\\[\\]{}()$\"\\'`\\\\<&|;]+", Text, nil},
{`\b\d+\b`, LiteralNumber, nil},
{`(?<=\s+)--?[^\d][\w-]*`, NameAttribute, nil},
{".+?", Text, nil},
},
"string": {
{`"`, LiteralStringDouble, Pop(1)},


@ -0,0 +1,39 @@
package f
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// FortranFixed lexer.
var FortranFixed = internal.Register(MustNewLazyLexer(
&Config{
Name: "FortranFixed",
Aliases: []string{"fortranfixed"},
Filenames: []string{"*.f", "*.F"},
MimeTypes: []string{"text/x-fortran"},
NotMultiline: true,
CaseInsensitive: true,
},
func() Rules {
return Rules{
"root": {
{`[C*].*\n`, Comment, nil},
{`#.*\n`, CommentPreproc, nil},
{`[\t ]*!.*\n`, Comment, nil},
{`(.{5})`, NameLabel, Push("cont-char")},
{`.*\n`, Using(Fortran), nil},
},
"cont-char": {
{` `, Text, Push("code")},
{`0`, Comment, Push("code")},
{`.`, GenericStrong, Push("code")},
},
"code": {
{`(.{66})(.*)(\n)`, ByGroups(Using(Fortran), Comment, Text), Push("root")},
{`.*\n`, Using(Fortran), Push("root")},
Default(Push("root")),
},
}
},
))


@ -50,6 +50,7 @@ func goRules() Rules {
{`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
{`0[0-7]+`, LiteralNumberOct, nil},
{`0[xX][0-9a-fA-F_]+`, LiteralNumberHex, nil},
{`0b[01_]+`, LiteralNumberBin, nil},
{`(0|[1-9][0-9_]*)`, LiteralNumberInteger, nil},
{`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
{"(`)([^`]*)(`)", ByGroups(LiteralString, Using(TypeRemappingLexer(GoTextTemplate, TypeMapping{{Other, LiteralString, nil}})), LiteralString), nil},
@ -97,6 +98,7 @@ func goTemplateRules() Rules {
{`-?\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
{`-?0[0-7]+`, LiteralNumberOct, nil},
{`-?0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
{`-?0b[01_]+`, LiteralNumberBin, nil},
{`-?(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
{`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
{"`[^`]*`", LiteralString, nil},

vendor/github.com/alecthomas/chroma/lexers/g/groff.go (new file, generated, vendored, 47 lines)

@ -0,0 +1,47 @@
package g
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Groff lexer.
var Groff = internal.Register(MustNewLazyLexer(
&Config{
Name: "Groff",
Aliases: []string{"groff", "nroff", "man"},
Filenames: []string{"*.[1-9]", "*.1p", "*.3pm", "*.man"},
MimeTypes: []string{"application/x-troff", "text/troff"},
},
func() Rules {
return Rules{
"root": {
{`(\.)(\w+)`, ByGroups(Text, Keyword), Push("request")},
{`\.`, Punctuation, Push("request")},
{`[^\\\n]+`, Text, Push("textline")},
Default(Push("textline")),
},
"textline": {
Include("escapes"),
{`[^\\\n]+`, Text, nil},
{`\n`, Text, Pop(1)},
},
"escapes": {
{`\\"[^\n]*`, Comment, nil},
{`\\[fn]\w`, LiteralStringEscape, nil},
{`\\\(.{2}`, LiteralStringEscape, nil},
{`\\.\[.*\]`, LiteralStringEscape, nil},
{`\\.`, LiteralStringEscape, nil},
{`\\\n`, Text, Push("request")},
},
"request": {
{`\n`, Text, Pop(1)},
Include("escapes"),
{`"[^\n"]+"`, LiteralStringDouble, nil},
{`\d+`, LiteralNumber, nil},
{`\S+`, LiteralString, nil},
{`\s+`, Text, nil},
},
}
},
))


@ -27,10 +27,10 @@ func haskellRules() Rules {
{`\berror\b`, NameException, nil},
{`\b(case|class|data|default|deriving|do|else|family|if|in|infix[lr]?|instance|let|newtype|of|then|type|where|_)(?!\')\b`, KeywordReserved, nil},
{`'[^\\]'`, LiteralStringChar, nil},
{`^[_a-zµß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣԥԧա-ևᴀ-ᴫᵫ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷℊℎ-ℏℓℯℴℹℼ--ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱻⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⳬⳮⳳⴀ-ⴥⴧⴭꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙡꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌꞎꞑꞓꞡꞣꞥꞧꞩꟺff-stﬓ-ﬗa-z𐐨-𐑏𝐚-𝐳𝑎-𝑔𝑖-𝑧𝒂-𝒛𝒶-𝒹𝒻𝒽-𝓃𝓅-𝓏𝓪-𝔃𝔞-𝔷𝕒-𝕫𝖆-𝖟𝖺-𝗓𝗮-𝘇𝘢-𝘻𝙖-𝙯𝚊-𝚥𝛂-𝛚𝛜-𝛡𝛼-𝜔𝜖-𝜛𝜶-𝝎𝝐-𝝕𝝰-𝞈𝞊-𝞏𝞪-𝟂𝟄-𝟉𝟋][\w\']*`, NameFunction, nil},
{`'?[_a-zµß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣԥԧա-ևᴀ-ᴫᵫ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷℊℎ-ℏℓℯℴℹℼ--ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱻⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⳬⳮⳳⴀ-ⴥⴧⴭꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙡꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌꞎꞑꞓꞡꞣꞥꞧꞩꟺff-stﬓ-ﬗa-z𐐨-𐑏𝐚-𝐳𝑎-𝑔𝑖-𝑧𝒂-𝒛𝒶-𝒹𝒻𝒽-𝓃𝓅-𝓏𝓪-𝔃𝔞-𝔷𝕒-𝕫𝖆-𝖟𝖺-𝗓𝗮-𝘇𝘢-𝘻𝙖-𝙯𝚊-𝚥𝛂-𝛚𝛜-𝛡𝛼-𝜔𝜖-𝜛𝜶-𝝎𝝐-𝝕𝝰-𝞈𝞊-𝞏𝞪-𝟂𝟄-𝟉𝟋][\w']*`, Name, nil},
{`('')?[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w\']*`, KeywordType, nil},
{`(')[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w\']*`, KeywordType, nil},
{`^[_\p{Ll}][\w\']*`, NameFunction, nil},
{`'?[_\p{Ll}][\w']*`, Name, nil},
{`('')?[\p{Lu}][\w\']*`, KeywordType, nil},
{`(')[\p{Lu}][\w\']*`, KeywordType, nil},
{`(')\[[^\]]*\]`, KeywordType, nil},
{`(')\([^)]*\)`, KeywordType, nil},
{`\\(?![:!#$%&*+.\\/<=>?@^|~-]+)`, NameFunction, nil},
@ -53,20 +53,20 @@ func haskellRules() Rules {
{`"`, LiteralString, Push("string")},
{`\)`, Punctuation, Pop(1)},
{`qualified\b`, Keyword, nil},
{`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(as)(\s+)([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)`, ByGroups(NameNamespace, Text, Keyword, Text, Name), Pop(1)},
{`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(hiding)(\s+)(\()`, ByGroups(NameNamespace, Text, Keyword, Text, Punctuation), Push("funclist")},
{`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")},
{`([\p{Lu}][\w.]*)(\s+)(as)(\s+)([\p{Lu}][\w.]*)`, ByGroups(NameNamespace, Text, Keyword, Text, Name), Pop(1)},
{`([\p{Lu}][\w.]*)(\s+)(hiding)(\s+)(\()`, ByGroups(NameNamespace, Text, Keyword, Text, Punctuation), Push("funclist")},
{`([\p{Lu}][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")},
{`[\w.]+`, NameNamespace, Pop(1)},
},
"module": {
{`\s+`, Text, nil},
{`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")},
{`[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*`, NameNamespace, Pop(1)},
{`([\p{Lu}][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")},
{`[\p{Lu}][\w.]*`, NameNamespace, Pop(1)},
},
"funclist": {
{`\s+`, Text, nil},
{`[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊]\w*`, KeywordType, nil},
{`(_[\w\']+|[a-zµß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣԥԧա-ևᴀ-ᴫᵫ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷℊℎ-ℏℓℯℴℹℼ--ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱻⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⳬⳮⳳⴀ-ⴥⴧⴭꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙡꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌꞎꞑꞓꞡꞣꞥꞧꞩꟺff-stﬓ-ﬗa-z𐐨-𐑏𝐚-𝐳𝑎-𝑔𝑖-𝑧𝒂-𝒛𝒶-𝒹𝒻𝒽-𝓃𝓅-𝓏𝓪-𝔃𝔞-𝔷𝕒-𝕫𝖆-𝖟𝖺-𝗓𝗮-𝘇𝘢-𝘻𝙖-𝙯𝚊-𝚥𝛂-𝛚𝛜-𝛡𝛼-𝜔𝜖-𝜛𝜶-𝝎𝝐-𝝕𝝰-𝞈𝞊-𝞏𝞪-𝟂𝟄-𝟉𝟋][\w\']*)`, NameFunction, nil},
{`[\p{Lu}]\w*`, KeywordType, nil},
{`(_[\w\']+|[\p{Ll}][\w\']*)`, NameFunction, nil},
{`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil},
{`\{-`, CommentMultiline, Push("comment")},
{`,`, Punctuation, nil},
@ -92,7 +92,7 @@ func haskellRules() Rules {
},
"escape": {
{`[abfnrtv"\'&\\]`, LiteralStringEscape, Pop(1)},
{`\^[][A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ---ℝℤΩℨK--ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊@^_]`, LiteralStringEscape, Pop(1)},
{`\^[][\p{Lu}@^_]`, LiteralStringEscape, Pop(1)},
{`NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL`, LiteralStringEscape, Pop(1)},
{`o[0-7]+`, LiteralStringEscape, Pop(1)},
{`x[\da-fA-F]+`, LiteralStringEscape, Pop(1)},

View File

@ -37,14 +37,14 @@ func httpRules() Rules {
}
}
func httpContentBlock(groups []string, lexer Lexer) Iterator {
func httpContentBlock(groups []string, state *LexerState) Iterator {
tokens := []Token{
{Generic, groups[0]},
}
return Literator(tokens...)
}
func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
func httpHeaderBlock(groups []string, state *LexerState) Iterator {
tokens := []Token{
{Name, groups[1]},
{Text, groups[2]},
@ -56,7 +56,7 @@ func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
return Literator(tokens...)
}
func httpContinuousHeaderBlock(groups []string, lexer Lexer) Iterator {
func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator {
tokens := []Token{
{Text, groups[1]},
{Literal, groups[2]},
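Note: the emitter callbacks above now receive a *LexerState instead of a Lexer, matching the updated EmitterFunc shape used throughout this upgrade. A minimal sketch of a custom emitter under that assumed signature (the rule and the key/value split are hypothetical):

// keyValueEmitter is a hypothetical emitter following the new callback shape.
// groups holds the regex capture groups; state is the running LexerState.
func keyValueEmitter(groups []string, state *LexerState) Iterator {
	tokens := []Token{
		{Name, groups[1]},          // key
		{Punctuation, groups[2]},   // separator
		{LiteralString, groups[3]}, // value
	}
	return Literator(tokens...)
}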

View File

@ -6,11 +6,22 @@ import (
"sort"
"strings"
"github.com/danwakefield/fnmatch"
"github.com/alecthomas/chroma"
)
var (
ignoredSuffixes = [...]string{
// Editor backups
"~", ".bak", ".old", ".orig",
// Debian and derivatives apt/dpkg/ucf backups
".dpkg-dist", ".dpkg-old", ".ucf-dist", ".ucf-new", ".ucf-old",
// Red Hat and derivatives rpm backups
".rpmnew", ".rpmorig", ".rpmsave",
// Build system input/template files
".in",
}
)
// Registry of Lexers.
var Registry = struct {
Lexers chroma.Lexers
@ -91,8 +102,21 @@ func Match(filename string) chroma.Lexer {
for _, lexer := range Registry.Lexers {
config := lexer.Config()
for _, glob := range config.Filenames {
if fnmatch.Match(glob, filename, 0) {
ok, err := filepath.Match(glob, filename)
if err != nil { // nolint
panic(err)
} else if ok {
matched = append(matched, lexer)
} else {
for _, suf := range &ignoredSuffixes {
ok, err := filepath.Match(glob+suf, filename)
if err != nil {
panic(err)
} else if ok {
matched = append(matched, lexer)
break
}
}
}
}
}
@ -105,8 +129,21 @@ func Match(filename string) chroma.Lexer {
for _, lexer := range Registry.Lexers {
config := lexer.Config()
for _, glob := range config.AliasFilenames {
if fnmatch.Match(glob, filename, 0) {
ok, err := filepath.Match(glob, filename)
if err != nil { // nolint
panic(err)
} else if ok {
matched = append(matched, lexer)
} else {
for _, suf := range &ignoredSuffixes {
ok, err := filepath.Match(glob+suf, filename)
if err != nil {
panic(err)
} else if ok {
matched = append(matched, lexer)
break
}
}
}
}
}
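The matching above switches from fnmatch to the standard library's filepath.Match, retrying each glob with the known backup suffixes appended; filepath.Match only returns an error for a malformed pattern, which is why the error path simply panics. A small illustration using only the standard library (the filenames are made up):

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Plain glob match, as in the first branch above.
	ok, err := filepath.Match("*.py", "setup.py")
	fmt.Println(ok, err) // true <nil>

	// Backup-suffix retry, as in the ignoredSuffixes loop above.
	ok, _ = filepath.Match("*.py"+".orig", "setup.py.orig")
	fmt.Println(ok) // true

	// Malformed pattern: the only case that yields an error.
	_, err = filepath.Match("[", "x")
	fmt.Println(err) // syntax error in pattern
}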

File diff suppressed because one or more lines are too long

View File

@ -23,6 +23,9 @@ func jsonRules() Rules {
"whitespace": {
{`\s+`, Text, nil},
},
"comment": {
{`//.*?\n`, CommentSingle, nil},
},
"simplevalue": {
{`(true|false|null)\b`, KeywordConstant, nil},
{`-?(0|[1-9]\d*)(\.\d+[eE](\+|-)?\d+|[eE](\+|-)?\d+|\.\d+)`, LiteralNumberFloat, nil},
@ -37,18 +40,21 @@ func jsonRules() Rules {
},
"objectvalue": {
Include("whitespace"),
Include("comment"),
{`"(\\\\|\\"|[^"])*"`, NameTag, Push("objectattribute")},
{`\}`, Punctuation, Pop(1)},
},
"arrayvalue": {
Include("whitespace"),
Include("value"),
Include("comment"),
{`,`, Punctuation, nil},
{`\]`, Punctuation, Pop(1)},
},
"value": {
Include("whitespace"),
Include("simplevalue"),
Include("comment"),
{`\{`, Punctuation, Push("objectvalue")},
{`\[`, Punctuation, Push("arrayvalue")},
},
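With the new "comment" state included from the value states, the JSON lexer now tolerates // line comments. A hedged usage sketch (the input and output formatting are illustrative only):

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	src := "{\n  // comments are now recognised\n  \"debug\": true\n}\n"
	it, err := lexers.Get("json").Tokenise(nil, src)
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-25s %q\n", tok.Type, tok.Value)
	}
}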

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -3,7 +3,7 @@
// Sub-packages contain lexer implementations.
package lexers
// nolint: golint
// nolint
import (
"github.com/alecthomas/chroma"
_ "github.com/alecthomas/chroma/lexers/a"

View File

@ -40,7 +40,7 @@ func markdownRules() Rules {
},
"inline": {
{`\\.`, Text, nil},
{`(\s)([*_][^*_]+[*_])(\W|\n)`, ByGroups(Text, GenericEmph, Text), nil},
{`(\s)(\*|_)((?:(?!\2).)*)(\2)((?=\W|\n))`, ByGroups(Text, GenericEmph, GenericEmph, GenericEmph, Text), nil},
{`(\s)((\*\*|__).*?)\3((?=\W|\n))`, ByGroups(Text, GenericStrong, GenericStrong, Text), nil},
{`(\s)(~~[^~]+~~)((?=\W|\n))`, ByGroups(Text, GenericDeleted, Text), nil},
{"`[^`]+`", LiteralStringBacktick, nil},

View File

@ -6,7 +6,7 @@ import (
)
// mcfunction lexer.
var MCFunction = internal.Register(MustNewLexer(
var MCFunction = internal.Register(MustNewLazyLexer(
&Config{
Name: "mcfunction",
Aliases: []string{"mcfunction"},
@ -15,93 +15,95 @@ var MCFunction = internal.Register(MustNewLexer(
NotMultiline: true,
DotAll: true,
},
Rules{
"simplevalue": {
{`(true|false)`, KeywordConstant, nil},
{`[01]b`, LiteralNumber, nil},
{`-?(0|[1-9]\d*)(\.\d+[eE](\+|-)?\d+|[eE](\+|-)?\d+|\.\d+)`, LiteralNumberFloat, nil},
{`(-?\d+)(\.\.)(-?\d+)`, ByGroups(LiteralNumberInteger, Punctuation, LiteralNumberInteger), nil},
{`-?(0|[1-9]\d*)`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'[^']+'`, LiteralStringSingle, nil},
{`([!#]?)(\w+)`, ByGroups(Punctuation, Text), nil},
},
"nbtobjectattribute": {
Include("nbtvalue"),
{`:`, Punctuation, nil},
{`,`, Punctuation, Pop(1)},
{`\}`, Punctuation, Pop(2)},
},
"nbtobjectvalue": {
{`("(\\\\|\\"|[^"])*"|[a-zA-Z0-9_]+)`, NameTag, Push("nbtobjectattribute")},
{`\}`, Punctuation, Pop(1)},
},
"nbtarrayvalue": {
Include("nbtvalue"),
{`,`, Punctuation, nil},
{`\]`, Punctuation, Pop(1)},
},
"nbtvalue": {
Include("simplevalue"),
{`\{`, Punctuation, Push("nbtobjectvalue")},
{`\[`, Punctuation, Push("nbtarrayvalue")},
},
"argumentvalue": {
Include("simplevalue"),
{`,`, Punctuation, Pop(1)},
{`[}\]]`, Punctuation, Pop(2)},
},
"argumentlist": {
{`(nbt)(={)`, ByGroups(NameAttribute, Punctuation), Push("nbtobjectvalue")},
{`([A-Za-z0-9/_!]+)(={)`, ByGroups(NameAttribute, Punctuation), Push("argumentlist")},
{`([A-Za-z0-9/_!]+)(=)`, ByGroups(NameAttribute, Punctuation), Push("argumentvalue")},
Include("simplevalue"),
{`,`, Punctuation, nil},
{`[}\]]`, Punctuation, Pop(1)},
},
"root": {
{`#.*?\n`, CommentSingle, nil},
{Words(`/?`, `\b`, `ability`, `attributes`, `advancement`,
`ban`, `ban-ip`, `banlist`, `bossbar`,
`camerashake`, `classroommode`, `clear`,
`clearspawnpoint`, `clone`, `code`, `collect`,
`createagent`, `data`, `datapack`, `debug`,
`defaultgamemode`, `deop`, `destroy`, `detect`,
`detectredstone`, `difficulty`, `dropall`,
`effect`, `enchant`, `event`, `execute`,
`experience`, `fill`, `flog`, `forceload`,
`function`, `gamemode`, `gamerule`,
`geteduclientinfo`, `give`, `help`, `item`,
`immutableworld`, `kick`, `kill`, `list`,
`locate`, `locatebiome`, `loot`, `me`, `mixer`,
`mobevent`, `move`, `msg`, `music`, `op`,
`pardon`, `particle`, `playanimation`,
`playsound`, `position`, `publish`,
`raytracefog`, `recipe`, `reload`, `remove`,
`replaceitem`, `ride`, `save`, `save-all`,
`save-off`, `save-on`, `say`, `schedule`,
`scoreboard`, `seed`, `setblock`,
`setidletimeout`, `setmaxplayers`,
`setworldspawn`, `spawnpoint`, `spectate`,
`spreadplayers`, `stop`, `stopsound`,
`structure`, `summon`, `tag`, `team`, `teammsg`,
`teleport`, `tell`, `tellraw`, `testfor`,
`testforblock`, `testforblocks`, `tickingarea`,
`time`, `title`, `toggledownfall`, `tp`,
`tpagent`, `transfer`, `transferserver`,
`trigger`, `turn`, `w`, `weather`, `whitelist`,
`worldborder`, `worldbuilder`, `wsserver`, `xp`,
), KeywordReserved, nil},
{Words(``, ``, `@p`, `@r`, `@a`, `@e`, `@s`, `@c`, `@v`),
KeywordConstant, nil},
{`\[`, Punctuation, Push("argumentlist")},
{`{`, Punctuation, Push("nbtobjectvalue")},
{`~`, NameBuiltin, nil},
{`([a-zA-Z_]+:)?[a-zA-Z_]+\b`, Text, nil},
{`([a-z]+)(\.)([0-9]+)\b`, ByGroups(Text, Punctuation, LiteralNumber), nil},
{`([<>=]|<=|>=)`, Punctuation, nil},
Include("simplevalue"),
{`\s+`, TextWhitespace, nil},
},
func() Rules {
return Rules{
"simplevalue": {
{`(true|false)`, KeywordConstant, nil},
{`[01]b`, LiteralNumber, nil},
{`-?(0|[1-9]\d*)(\.\d+[eE](\+|-)?\d+|[eE](\+|-)?\d+|\.\d+)`, LiteralNumberFloat, nil},
{`(-?\d+)(\.\.)(-?\d+)`, ByGroups(LiteralNumberInteger, Punctuation, LiteralNumberInteger), nil},
{`-?(0|[1-9]\d*)`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'[^']+'`, LiteralStringSingle, nil},
{`([!#]?)(\w+)`, ByGroups(Punctuation, Text), nil},
},
"nbtobjectattribute": {
Include("nbtvalue"),
{`:`, Punctuation, nil},
{`,`, Punctuation, Pop(1)},
{`\}`, Punctuation, Pop(2)},
},
"nbtobjectvalue": {
{`("(\\\\|\\"|[^"])*"|[a-zA-Z0-9_]+)`, NameTag, Push("nbtobjectattribute")},
{`\}`, Punctuation, Pop(1)},
},
"nbtarrayvalue": {
Include("nbtvalue"),
{`,`, Punctuation, nil},
{`\]`, Punctuation, Pop(1)},
},
"nbtvalue": {
Include("simplevalue"),
{`\{`, Punctuation, Push("nbtobjectvalue")},
{`\[`, Punctuation, Push("nbtarrayvalue")},
},
"argumentvalue": {
Include("simplevalue"),
{`,`, Punctuation, Pop(1)},
{`[}\]]`, Punctuation, Pop(2)},
},
"argumentlist": {
{`(nbt)(={)`, ByGroups(NameAttribute, Punctuation), Push("nbtobjectvalue")},
{`([A-Za-z0-9/_!]+)(={)`, ByGroups(NameAttribute, Punctuation), Push("argumentlist")},
{`([A-Za-z0-9/_!]+)(=)`, ByGroups(NameAttribute, Punctuation), Push("argumentvalue")},
Include("simplevalue"),
{`,`, Punctuation, nil},
{`[}\]]`, Punctuation, Pop(1)},
},
"root": {
{`#.*?\n`, CommentSingle, nil},
{Words(`/?`, `\b`, `ability`, `attributes`, `advancement`,
`ban`, `ban-ip`, `banlist`, `bossbar`,
`camerashake`, `classroommode`, `clear`,
`clearspawnpoint`, `clone`, `code`, `collect`,
`createagent`, `data`, `datapack`, `debug`,
`defaultgamemode`, `deop`, `destroy`, `detect`,
`detectredstone`, `difficulty`, `dropall`,
`effect`, `enchant`, `event`, `execute`,
`experience`, `fill`, `flog`, `forceload`,
`function`, `gamemode`, `gamerule`,
`geteduclientinfo`, `give`, `help`, `item`,
`immutableworld`, `kick`, `kill`, `list`,
`locate`, `locatebiome`, `loot`, `me`, `mixer`,
`mobevent`, `move`, `msg`, `music`, `op`,
`pardon`, `particle`, `playanimation`,
`playsound`, `position`, `publish`,
`raytracefog`, `recipe`, `reload`, `remove`,
`replaceitem`, `ride`, `save`, `save-all`,
`save-off`, `save-on`, `say`, `schedule`,
`scoreboard`, `seed`, `setblock`,
`setidletimeout`, `setmaxplayers`,
`setworldspawn`, `spawnpoint`, `spectate`,
`spreadplayers`, `stop`, `stopsound`,
`structure`, `summon`, `tag`, `team`, `teammsg`,
`teleport`, `tell`, `tellraw`, `testfor`,
`testforblock`, `testforblocks`, `tickingarea`,
`time`, `title`, `toggledownfall`, `tp`,
`tpagent`, `transfer`, `transferserver`,
`trigger`, `turn`, `w`, `weather`, `whitelist`,
`worldborder`, `worldbuilder`, `wsserver`, `xp`,
), KeywordReserved, nil},
{Words(``, ``, `@p`, `@r`, `@a`, `@e`, `@s`, `@c`, `@v`),
KeywordConstant, nil},
{`\[`, Punctuation, Push("argumentlist")},
{`{`, Punctuation, Push("nbtobjectvalue")},
{`~`, NameBuiltin, nil},
{`([a-zA-Z_]+:)?[a-zA-Z_]+\b`, Text, nil},
{`([a-z]+)(\.)([0-9]+)\b`, ByGroups(Text, Punctuation, LiteralNumber), nil},
{`([<>=]|<=|>=)`, Punctuation, nil},
Include("simplevalue"),
{`\s+`, TextWhitespace, nil},
},
}
},
))
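The conversion above, from a Rules literal to a func() Rules constructor, is the lazy-lexer pattern this upgrade applies across the package: MustNewLazyLexer defers building the rules until the lexer is first used. A minimal sketch of the same pattern for a hypothetical language (assumes the usual dot import of chroma used by these lexer files):

// MyLang is a hypothetical lexer shown only to illustrate lazy registration.
var MyLang = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "MyLang",
		Aliases:   []string{"mylang"},
		Filenames: []string{"*.my"},
	},
	func() Rules {
		return Rules{
			"root": {
				{`\s+`, Text, nil},
				{`#.*?$`, CommentSingle, nil},
				{`"[^"]*"`, LiteralString, nil},
				{`\w+`, Name, nil},
			},
		}
	},
))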

51
vendor/github.com/alecthomas/chroma/lexers/m/meson.go generated vendored Normal file
View File

@ -0,0 +1,51 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Meson lexer.
var Meson = internal.Register(MustNewLazyLexer(
&Config{
Name: "Meson",
Aliases: []string{"meson", "meson.build"},
Filenames: []string{"meson.build", "meson_options.txt"},
MimeTypes: []string{"text/x-meson"},
},
func() Rules {
return Rules{
"root": {
{`#.*?$`, Comment, nil},
{`'''.*'''`, LiteralStringSingle, nil},
{`[1-9][0-9]*`, LiteralNumberInteger, nil},
{`0o[0-7]+`, LiteralNumberOct, nil},
{`0x[a-fA-F0-9]+`, LiteralNumberHex, nil},
Include("string"),
Include("keywords"),
Include("expr"),
{`[a-zA-Z_][a-zA-Z_0-9]*`, Name, nil},
{`\s+`, TextWhitespace, nil},
},
"string": {
{`[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}`, LiteralString, nil},
{`'.*?(?<!\\)(\\\\)*?'`, LiteralString, nil},
},
"keywords": {
{Words(``, `\b`, `if`, `elif`, `else`, `endif`, `foreach`, `endforeach`, `break`, `continue`), Keyword, nil},
},
"expr": {
{`(in|and|or|not)\b`, OperatorWord, nil},
{`(\*=|/=|%=|\+]=|-=|==|!=|\+|-|=)`, Operator, nil},
{`[\[\]{}:().,?]`, Punctuation, nil},
{Words(``, `\b`, `true`, `false`), KeywordConstant, nil},
Include("builtins"),
{Words(``, `\b`, `meson`, `build_machine`, `host_machine`, `target_machine`), NameVariableMagic, nil},
},
"builtins": {
{Words(`(?<!\.)`, `\b`, `add_global_arguments`, `add_global_link_arguments`, `add_languages`, `add_project_arguments`, `add_project_link_arguments`, `add_test_setup`, `assert`, `benchmark`, `both_libraries`, `build_target`, `configuration_data`, `configure_file`, `custom_target`, `declare_dependency`, `dependency`, `disabler`, `environment`, `error`, `executable`, `files`, `find_library`, `find_program`, `generator`, `get_option`, `get_variable`, `include_directories`, `install_data`, `install_headers`, `install_man`, `install_subdir`, `is_disabler`, `is_variable`, `jar`, `join_paths`, `library`, `message`, `project`, `range`, `run_command`, `set_variable`, `shared_library`, `shared_module`, `static_library`, `subdir`, `subdir_done`, `subproject`, `summary`, `test`, `vcs_tag`, `warning`), NameBuiltin, nil},
{`(?<!\.)import\b`, NameNamespace, nil},
},
}
},
))

101
vendor/github.com/alecthomas/chroma/lexers/m/metal.go generated vendored Normal file
View File

@ -0,0 +1,101 @@
package m
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Metal lexer.
var Metal = internal.Register(MustNewLazyLexer(
&Config{
Name: "Metal",
Aliases: []string{"metal"},
Filenames: []string{"*.metal"},
MimeTypes: []string{"text/x-metal"},
EnsureNL: true,
},
metalRules,
))
func metalRules() Rules {
return Rules{
"statements": {
{Words(``, `\b`, `namespace`, `operator`, `template`, `this`, `using`, `constexpr`), Keyword, nil},
{`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")},
{`(class|struct|enum|union)\b(\s*)`, ByGroups(Keyword, Text), Push("classname")},
{`\[\[.+\]\]`, NameAttribute, nil},
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil},
{`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil},
{`0[xX]([0-9A-Fa-f]('?[0-9A-Fa-f]+)*)[LlUu]*`, LiteralNumberHex, nil},
{`0('?[0-7]+)+[LlUu]*`, LiteralNumberOct, nil},
{`0[Bb][01]('?[01]+)*[LlUu]*`, LiteralNumberBin, nil},
{`[0-9]('?[0-9]+)*[LlUu]*`, LiteralNumberInteger, nil},
{`\*/`, Error, nil},
{`[~!%^&*+=|?:<>/-]`, Operator, nil},
{`[()\[\],.]`, Punctuation, nil},
{Words(``, `\b`, `break`, `case`, `const`, `continue`, `do`, `else`, `enum`, `extern`, `for`, `if`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `while`), Keyword, nil},
{`(bool|float|half|long|ptrdiff_t|size_t|unsigned|u?char|u?int((8|16|32|64)_t)?|u?short)\b`, KeywordType, nil},
{`(bool|float|half|u?(char|int|long|short))(2|3|4)\b`, KeywordType, nil},
{`packed_(float|half|long|u?(char|int|short))(2|3|4)\b`, KeywordType, nil},
{`(float|half)(2|3|4)x(2|3|4)\b`, KeywordType, nil},
{`atomic_u?int\b`, KeywordType, nil},
{`(rg?(8|16)(u|s)norm|rgba(8|16)(u|s)norm|srgba8unorm|rgb10a2|rg11b10f|rgb9e5)\b`, KeywordType, nil},
{`(array|depth(2d|cube)(_array)?|depth2d_ms(_array)?|sampler|texture_buffer|texture(1|2)d(_array)?|texture2d_ms(_array)?|texture3d|texturecube(_array)?|uniform|visible_function_table)\b`, KeywordType, nil},
{`(true|false|NULL)\b`, NameBuiltin, nil},
{Words(``, `\b`, `device`, `constant`, `ray_data`, `thread`, `threadgroup`, `threadgroup_imageblock`), Keyword, nil},
{`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil},
{`[a-zA-Z_]\w*`, Name, nil},
},
"root": {
Include("whitespace"),
{`(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(Keyword, UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")},
{`(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(Keyword, UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil},
Default(Push("statement")),
},
"classname": {
{`(\[\[.+\]\])(\s*)`, ByGroups(NameAttribute, Text), nil},
{`[a-zA-Z_]\w*`, NameClass, Pop(1)},
{`\s*(?=[>{])`, Text, Pop(1)},
},
"whitespace": {
{`^#if\s+0`, CommentPreproc, Push("if0")},
{`^#`, CommentPreproc, Push("macro")},
{`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")},
{`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")},
{`\n`, Text, nil},
{`\s+`, Text, nil},
{`\\\n`, Text, nil},
{`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil},
{`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil},
{`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil},
},
"statement": {
Include("whitespace"),
Include("statements"),
{`[{]`, Punctuation, Push("root")},
{`[;}]`, Punctuation, Pop(1)},
},
"function": {
Include("whitespace"),
Include("statements"),
{`;`, Punctuation, nil},
{`\{`, Punctuation, Push()},
{`\}`, Punctuation, Pop(1)},
},
"macro": {
{`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil},
{`[^/\n]+`, CommentPreproc, nil},
{`/[*](.|\n)*?[*]/`, CommentMultiline, nil},
{`//.*?\n`, CommentSingle, Pop(1)},
{`/`, CommentPreproc, nil},
{`(?<=\\)\n`, CommentPreproc, nil},
{`\n`, CommentPreproc, Pop(1)},
},
"if0": {
{`^\s*#if.*?(?<!\\)\n`, CommentPreproc, Push()},
{`^\s*#el(?:se|if).*\n`, CommentPreproc, Pop(1)},
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil},
},
}
}

View File

@ -22,13 +22,13 @@ func myghtyRules() Rules {
"root": {
{`\s+`, Text, nil},
{`(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil},
{`(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Python), NameTag), nil},
{`(<&[^|])(.*?)(,.*?)?(&>)`, ByGroups(NameTag, NameFunction, Using(Python), NameTag), nil},
{`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Python), NameTag), nil},
{`(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Python2), NameTag), nil},
{`(<&[^|])(.*?)(,.*?)?(&>)`, ByGroups(NameTag, NameFunction, Using(Python2), NameTag), nil},
{`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Python2), NameTag), nil},
{`</&>`, NameTag, nil},
{`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Python), NameTag), nil},
{`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Python2), NameTag), nil},
{`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil},
{`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Python), Other), nil},
{`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Python2), Other), nil},
{`(?sx)
(.+?) # anything, followed by:
(?:

View File

@ -1,10 +1,17 @@
package m
import (
"regexp"
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
var (
mysqlAnalyserNameBetweenBacktickRe = regexp.MustCompile("`[a-zA-Z_]\\w*`")
mysqlAnalyserNameBetweenBracketRe = regexp.MustCompile(`\[[a-zA-Z_]\w*\]`)
)
// MySQL lexer.
var MySQL = internal.Register(MustNewLazyLexer(
&Config{
@ -16,7 +23,25 @@ var MySQL = internal.Register(MustNewLazyLexer(
CaseInsensitive: true,
},
mySQLRules,
))
).SetAnalyser(func(text string) float32 {
nameBetweenBacktickCount := len(mysqlAnalyserNameBetweenBacktickRe.FindAllString(text, -1))
nameBetweenBracketCount := len(mysqlAnalyserNameBetweenBracketRe.FindAllString(text, -1))
var result float32
// Same logic as above in the TSQL analysis.
dialectNameCount := nameBetweenBacktickCount + nameBetweenBracketCount
if dialectNameCount >= 1 && nameBetweenBacktickCount >= (2*nameBetweenBracketCount) {
// Found at least twice as many `name` as [name].
result += 0.5
} else if nameBetweenBacktickCount > nameBetweenBracketCount {
result += 0.2
} else if nameBetweenBacktickCount > 0 {
result += 0.1
}
return result
}))
func mySQLRules() Rules {
return Rules{
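The SetAnalyser hook attached to the MySQL lexer above feeds chroma's content-based detection: each registered analyser scores the input (by convention between 0 and 1) and the best-scoring lexer wins. A hedged usage sketch (the sample text is made up, and detection should merely favour MySQL here):

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Backtick-quoted identifiers push the MySQL analyser's score up,
	// so content-based detection should prefer it for this snippet.
	sample := "SELECT `id`, `name` FROM `users` WHERE `id` = 1;"
	if lexer := lexers.Analyse(sample); lexer != nil {
		fmt.Println(lexer.Config().Name)
	}
}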

View File

@ -0,0 +1,50 @@
package o
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// 1S:Enterprise lexer.
var OnesEnterprise = internal.Register(MustNewLazyLexer(
&Config{
Name: "OnesEnterprise",
Aliases: []string{"ones", "onesenterprise", "1S", "1S:Enterprise"},
Filenames: []string{"*.EPF", "*.epf", "*.ERF", "*.erf"},
MimeTypes: []string{"application/octet-stream"},
CaseInsensitive: true,
},
onesRules,
))
func onesRules() Rules {
return Rules{
"root": {
{`\n`, Text, nil},
{`\s+`, Text, nil},
{`\\\n`, Text, nil},
{`[^\S\n]+`, Text, nil},
{`//(.*?)\n`, Comment, nil},
{`(#область|#region|#конецобласти|#endregion|#если|#if|#иначе|#else|#конецесли|#endif).*`, CommentPreproc, nil},
{`(&наклиенте|&atclient|&насервере|&atserver|&насерверебезконтекста|&atservernocontext|&наклиентенасерверебезконтекста|&atclientatservernocontext).*`, CommentPreproc, nil},
{`(>=|<=|<>|\+|-|=|>|<|\*|/|%)`, Operator, nil},
{`(;|,|\)|\(|\.)`, Punctuation, nil},
{Words(``, `\b`, `истина`, `true`, `ложь`, `false`, `и`, `and`, `или`, `or`, `не`, `not`), Operator, nil},
{Words(``, `\b`, `если`, `if`, `тогда`, `then`, `иначе`, `else`, `иначеесли`, `elsif`, `конецесли`, `endif`), Operator, nil},
{Words(``, `\b`, `для`, `for`, `каждого`, `each`, `из`, `in`, `цикл`, `do`, `пока`, `while`, `конеццикла`, `enddo`, `по`, `to`), Operator, nil},
{Words(``, `\b`, `прервать`, `break`, `продолжить`, `continue`, `возврат`, `return`, `перейти`, `goto`), Operator, nil},
{Words(``, `\b`, `процедура`, `procedure`, `конецпроцедуры`, `endprocedure`, `функция`, `function`, `конецфункции`, `endfunction`), Keyword, nil},
{Words(``, `\b`, `новый`, `new`, `знач`, `val`, `экспорт`, `export`, `перем`, `var`), Keyword, nil},
{Words(``, `\b`, `попытка`, `try`, `исключение`, `except`, `вызватьисключение`, `raise`, `конецпопытки`, `endtry`), Keyword, nil},
{Words(``, `\b`, `выполнить`, `execute`, `вычислить`, `eval`), Keyword, nil},
{`"`, LiteralString, Push("string")},
{`[_а-яА-Я0-9][а-яА-Я0-9]*`, Name, nil},
{`[_\w][\w]*`, Name, nil},
},
"string": {
{`""`, LiteralString, nil},
{`"C?`, LiteralString, Pop(1)},
{`[^"]+`, LiteralString, nil},
},
}
}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,76 @@
package p
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// nolint
// Lexer for the Plutus Core Languages (version 2.1)
//
// including both Typed- and Untyped- versions
// based on “Formal Specification of the Plutus Core Language (version 2.1)”, published 6th April 2021:
// https://hydra.iohk.io/build/8205579/download/1/plutus-core-specification.pdf
var PlutusCoreLang = internal.Register(MustNewLazyLexer(
&Config{
Name: "Plutus Core",
Aliases: []string{"plutus-core", "plc"},
Filenames: []string{"*.plc"},
MimeTypes: []string{"text/x-plutus-core", "application/x-plutus-core"},
},
plutusCoreRules,
))
func plutusCoreRules() Rules {
return Rules{
"root": {
{`\s+`, Text, nil},
{`(\(|\))`, Punctuation, nil},
{`(\[|\])`, Punctuation, nil},
{`({|})`, Punctuation, nil},
// Constants. Figure 1.
// For version, see handling of (program ...) below.
{`([+-]?\d+)`, LiteralNumberInteger, nil},
{`(#([a-fA-F0-9][a-fA-F0-9])+)`, LiteralString, nil},
{`(\(\))`, NameConstant, nil},
{`(True|False)`, NameConstant, nil},
// Keywords. Figures 2 and 15.
// Special handling for program because it is followed by a version.
{`(con |abs |iwrap |unwrap |lam |builtin |delay |force |error)`, Keyword, nil},
{`(fun |all |ifix |lam |con )`, Keyword, nil},
{`(type|fun )`, Keyword, nil},
{`(program )(\S+)`, ByGroups(Keyword, LiteralString), nil},
// Built-in Types. Figure 12.
{`(unit|bool|integer|bytestring|string)`, KeywordType, nil},
// Built-ins Functions. Figure 14 but, more importantly, implementation:
// https://github.com/input-output-hk/plutus/blob/6d759c4/plutus-core/plutus-core/src/PlutusCore/Default/Builtins.hs#L42-L111
{`(addInteger |subtractInteger |multiplyInteger |divideInteger |quotientInteger |remainderInteger |modInteger |equalsInteger |lessThanInteger |lessThanEqualsInteger )`, NameBuiltin, nil},
{`(appendByteString |consByteString |sliceByteString |lengthOfByteString |indexByteString |equalsByteString |lessThanByteString |lessThanEqualsByteString )`, NameBuiltin, nil},
{`(sha2_256 |sha3_256 |blake2b_256 |verifySignature )`, NameBuiltin, nil},
{`(appendString |equalsString |encodeUtf8 |decodeUtf8 )`, NameBuiltin, nil},
{`(ifThenElse )`, NameBuiltin, nil},
{`(chooseUnit )`, NameBuiltin, nil},
{`(trace )`, NameBuiltin, nil},
{`(fstPair |sndPair )`, NameBuiltin, nil},
{`(chooseList |mkCons |headList |tailList |nullList )`, NameBuiltin, nil},
{`(chooseData |constrData |mapData |listData |iData |bData |unConstrData |unMapData |unListData |unIData |unBData |equalsData )`, NameBuiltin, nil},
{`(mkPairData |mkNilData |mkNilPairData )`, NameBuiltin, nil},
// Name. Figure 1.
{`([a-zA-Z][a-zA-Z0-9_']*)`, Name, nil},
// Unicode String. Not in the specification.
{`"`, LiteralStringDouble, Push("string")},
},
"string": {
{`[^\\"]+`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
}
}

View File

@ -0,0 +1,38 @@
package p
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// PowerQuery lexer.
var PowerQuery = internal.Register(MustNewLazyLexer(
&Config{
Name: "PowerQuery",
Aliases: []string{"powerquery", "pq"},
Filenames: []string{"*.pq"},
MimeTypes: []string{"text/x-powerquery"},
DotAll: true,
CaseInsensitive: true,
},
powerqueryRules,
))
func powerqueryRules() Rules {
return Rules{
"root": {
{`\s+`, Text, nil},
{`//.*?\n`, CommentSingle, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`(and|as|each|else|error|false|if|in|is|let|meta|not|null|or|otherwise|section|shared|then|true|try|type)\b`, Keyword, nil},
{`(#binary|#date|#datetime|#datetimezone|#duration|#infinity|#nan|#sections|#shared|#table|#time)\b`, KeywordType, nil},
{`(([a-zA-Z]|_)[\w|._]*|#"[^"]+")`, Name, nil},
{`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil},
{`([0-9]+\.[0-9]+|\.[0-9]+)([eE][0-9]+)?`, LiteralNumberFloat, nil},
{`[0-9]+`, LiteralNumberInteger, nil},
{`[\(\)\[\]\{\}]`, Punctuation, nil},
{`\.\.|\.\.\.|=>|<=|>=|<>|[@!?,;=<>\+\-\*\/&]`, Operator, nil},
},
}
}

View File

@ -9,113 +9,173 @@ import (
var Python = internal.Register(MustNewLazyLexer(
&Config{
Name: "Python",
Aliases: []string{"python", "py", "sage"},
Filenames: []string{"*.py", "*.pyw", "*.sc", "SConstruct", "SConscript", "*.tac", "*.sage"},
MimeTypes: []string{"text/x-python", "application/x-python"},
Aliases: []string{"python", "py", "sage", "python3", "py3"},
Filenames: []string{"*.py", "*.pyi", "*.pyw", "*.jy", "*.sage", "*.sc", "SConstruct", "SConscript", "*.bzl", "BUCK", "BUILD", "BUILD.bazel", "WORKSPACE", "*.tac"},
MimeTypes: []string{"text/x-python", "application/x-python", "text/x-python3", "application/x-python3"},
},
pythonRules,
))
func pythonRules() Rules {
const pythonIdentifier = `[_\p{L}][_\p{L}\p{N}]*`
return Rules{
"root": {
{`\n`, Text, nil},
{`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil},
{`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil},
{`[^\S\n]+`, Text, nil},
{`\A#!.+$`, CommentHashbang, nil},
{`#.*$`, CommentSingle, nil},
{`[]{}:(),;[]`, Punctuation, nil},
{`\\\n`, Text, nil},
{`\\`, Text, nil},
{`(in|is|and|or|not)\b`, OperatorWord, nil},
{`!=|==|<<|>>|[-~+/*%=<>&^|.]`, Operator, nil},
Include("keywords"),
{`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")},
{`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")},
{`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")},
{`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")},
Include("builtins"),
Include("magicfuncs"),
Include("magicvars"),
Include("backtick"),
{`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")},
{`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")},
{`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")},
{`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")},
Include("expr"),
},
"expr": {
{`(?i)(rf|fr)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("rfstringescape", "tdqf")},
{`(?i)(rf|fr)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("rfstringescape", "tsqf")},
{`(?i)(rf|fr)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("rfstringescape", "dqf")},
{`(?i)(rf|fr)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("rfstringescape", "sqf")},
{`([fF])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("fstringescape", "tdqf")},
{`([fF])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("fstringescape", "tsqf")},
{`([fF])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("fstringescape", "dqf")},
{`([fF])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("fstringescape", "sqf")},
{`(?i)(rb|br|r)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")},
{`(?i)(rb|br|r)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")},
{`(?i)(rb|br|r)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")},
{`(?i)(rb|br|r)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")},
{`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")},
{`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")},
{`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")},
{`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")},
Include("name"),
{`[^\S\n]+`, Text, nil},
Include("numbers"),
{`!=|==|<<|>>|:=|[-~+/*%=<>&^|.]`, Operator, nil},
{`[]{}:(),;[]`, Punctuation, nil},
{`(in|is|and|or|not)\b`, OperatorWord, nil},
Include("expr-keywords"),
Include("builtins"),
Include("magicfuncs"),
Include("magicvars"),
Include("name"),
},
"expr-inside-fstring": {
{`[{([]`, Punctuation, Push("expr-inside-fstring-inner")},
{`(=\s*)?(\![sraf])?\}`, LiteralStringInterpol, Pop(1)},
{`(=\s*)?(\![sraf])?:`, LiteralStringInterpol, Pop(1)},
{`\s+`, Text, nil},
Include("expr"),
},
"expr-inside-fstring-inner": {
{`[{([]`, Punctuation, Push("expr-inside-fstring-inner")},
{`[])}]`, Punctuation, Pop(1)},
{`\s+`, Text, nil},
Include("expr"),
},
"expr-keywords": {
{Words(``, `\b`, `async for`, `await`, `else`, `for`, `if`, `lambda`, `yield`, `yield from`), Keyword, nil},
{Words(``, `\b`, `True`, `False`, `None`), KeywordConstant, nil},
},
"keywords": {
{Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil},
{Words(``, `\b`, `assert`, `async`, `await`, `break`, `continue`, `del`, `elif`, `else`, `except`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `raise`, `nonlocal`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil},
{Words(``, `\b`, `True`, `False`, `None`), KeywordConstant, nil},
},
"builtins": {
{Words(`(?<!\.)`, `\b`, `__import__`, `abs`, `all`, `any`, `apply`, `basestring`, `bin`, `bool`, `buffer`, `bytearray`, `bytes`, `callable`, `chr`, `classmethod`, `cmp`, `coerce`, `compile`, `complex`, `delattr`, `dict`, `dir`, `divmod`, `enumerate`, `eval`, `execfile`, `exit`, `file`, `filter`, `float`, `frozenset`, `getattr`, `globals`, `hasattr`, `hash`, `hex`, `id`, `input`, `int`, `intern`, `isinstance`, `issubclass`, `iter`, `len`, `list`, `locals`, `long`, `map`, `max`, `min`, `next`, `object`, `oct`, `open`, `ord`, `pow`, `property`, `range`, `raw_input`, `reduce`, `reload`, `repr`, `reversed`, `round`, `set`, `setattr`, `slice`, `sorted`, `staticmethod`, `str`, `sum`, `super`, `tuple`, `type`, `unichr`, `unicode`, `vars`, `xrange`, `zip`), NameBuiltin, nil},
{`(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls)\b`, NameBuiltinPseudo, nil},
{Words(`(?<!\.)`, `\b`, `ArithmeticError`, `AssertionError`, `AttributeError`, `BaseException`, `DeprecationWarning`, `EOFError`, `EnvironmentError`, `Exception`, `FloatingPointError`, `FutureWarning`, `GeneratorExit`, `IOError`, `ImportError`, `ImportWarning`, `IndentationError`, `IndexError`, `KeyError`, `KeyboardInterrupt`, `LookupError`, `MemoryError`, `NameError`, `NotImplemented`, `NotImplementedError`, `OSError`, `OverflowError`, `OverflowWarning`, `PendingDeprecationWarning`, `ReferenceError`, `RuntimeError`, `RuntimeWarning`, `StandardError`, `StopIteration`, `SyntaxError`, `SyntaxWarning`, `SystemError`, `SystemExit`, `TabError`, `TypeError`, `UnboundLocalError`, `UnicodeDecodeError`, `UnicodeEncodeError`, `UnicodeError`, `UnicodeTranslateError`, `UnicodeWarning`, `UserWarning`, `ValueError`, `VMSError`, `Warning`, `WindowsError`, `ZeroDivisionError`), NameException, nil},
{Words(`(?<!\.)`, `\b`, `__import__`, `abs`, `all`, `any`, `bin`, `bool`, `bytearray`, `bytes`, `chr`, `classmethod`, `compile`, `complex`, `delattr`, `dict`, `dir`, `divmod`, `enumerate`, `eval`, `filter`, `float`, `format`, `frozenset`, `getattr`, `globals`, `hasattr`, `hash`, `hex`, `id`, `input`, `int`, `isinstance`, `issubclass`, `iter`, `len`, `list`, `locals`, `map`, `max`, `memoryview`, `min`, `next`, `object`, `oct`, `open`, `ord`, `pow`, `print`, `property`, `range`, `repr`, `reversed`, `round`, `set`, `setattr`, `slice`, `sorted`, `staticmethod`, `str`, `sum`, `super`, `tuple`, `type`, `vars`, `zip`), NameBuiltin, nil},
{`(?<!\.)(self|Ellipsis|NotImplemented|cls)\b`, NameBuiltinPseudo, nil},
{Words(`(?<!\.)`, `\b`, `ArithmeticError`, `AssertionError`, `AttributeError`, `BaseException`, `BufferError`, `BytesWarning`, `DeprecationWarning`, `EOFError`, `EnvironmentError`, `Exception`, `FloatingPointError`, `FutureWarning`, `GeneratorExit`, `IOError`, `ImportError`, `ImportWarning`, `IndentationError`, `IndexError`, `KeyError`, `KeyboardInterrupt`, `LookupError`, `MemoryError`, `NameError`, `NotImplementedError`, `OSError`, `OverflowError`, `PendingDeprecationWarning`, `ReferenceError`, `ResourceWarning`, `RuntimeError`, `RuntimeWarning`, `StopIteration`, `SyntaxError`, `SyntaxWarning`, `SystemError`, `SystemExit`, `TabError`, `TypeError`, `UnboundLocalError`, `UnicodeDecodeError`, `UnicodeEncodeError`, `UnicodeError`, `UnicodeTranslateError`, `UnicodeWarning`, `UserWarning`, `ValueError`, `VMSError`, `Warning`, `WindowsError`, `ZeroDivisionError`, `BlockingIOError`, `ChildProcessError`, `ConnectionError`, `BrokenPipeError`, `ConnectionAbortedError`, `ConnectionRefusedError`, `ConnectionResetError`, `FileExistsError`, `FileNotFoundError`, `InterruptedError`, `IsADirectoryError`, `NotADirectoryError`, `PermissionError`, `ProcessLookupError`, `TimeoutError`, `StopAsyncIteration`, `ModuleNotFoundError`, `RecursionError`), NameException, nil},
},
"magicfuncs": {
{Words(``, `\b`, `__abs__`, `__add__`, `__and__`, `__call__`, `__cmp__`, `__coerce__`, `__complex__`, `__contains__`, `__del__`, `__delattr__`, `__delete__`, `__delitem__`, `__delslice__`, `__div__`, `__divmod__`, `__enter__`, `__eq__`, `__exit__`, `__float__`, `__floordiv__`, `__ge__`, `__get__`, `__getattr__`, `__getattribute__`, `__getitem__`, `__getslice__`, `__gt__`, `__hash__`, `__hex__`, `__iadd__`, `__iand__`, `__idiv__`, `__ifloordiv__`, `__ilshift__`, `__imod__`, `__imul__`, `__index__`, `__init__`, `__instancecheck__`, `__int__`, `__invert__`, `__iop__`, `__ior__`, `__ipow__`, `__irshift__`, `__isub__`, `__iter__`, `__itruediv__`, `__ixor__`, `__le__`, `__len__`, `__long__`, `__lshift__`, `__lt__`, `__missing__`, `__mod__`, `__mul__`, `__ne__`, `__neg__`, `__new__`, `__nonzero__`, `__oct__`, `__op__`, `__or__`, `__pos__`, `__pow__`, `__radd__`, `__rand__`, `__rcmp__`, `__rdiv__`, `__rdivmod__`, `__repr__`, `__reversed__`, `__rfloordiv__`, `__rlshift__`, `__rmod__`, `__rmul__`, `__rop__`, `__ror__`, `__rpow__`, `__rrshift__`, `__rshift__`, `__rsub__`, `__rtruediv__`, `__rxor__`, `__set__`, `__setattr__`, `__setitem__`, `__setslice__`, `__str__`, `__sub__`, `__subclasscheck__`, `__truediv__`, `__unicode__`, `__xor__`), NameFunctionMagic, nil},
{Words(``, `\b`, `__abs__`, `__add__`, `__aenter__`, `__aexit__`, `__aiter__`, `__and__`, `__anext__`, `__await__`, `__bool__`, `__bytes__`, `__call__`, `__complex__`, `__contains__`, `__del__`, `__delattr__`, `__delete__`, `__delitem__`, `__dir__`, `__divmod__`, `__enter__`, `__eq__`, `__exit__`, `__float__`, `__floordiv__`, `__format__`, `__ge__`, `__get__`, `__getattr__`, `__getattribute__`, `__getitem__`, `__gt__`, `__hash__`, `__iadd__`, `__iand__`, `__ifloordiv__`, `__ilshift__`, `__imatmul__`, `__imod__`, `__imul__`, `__index__`, `__init__`, `__instancecheck__`, `__int__`, `__invert__`, `__ior__`, `__ipow__`, `__irshift__`, `__isub__`, `__iter__`, `__itruediv__`, `__ixor__`, `__le__`, `__len__`, `__length_hint__`, `__lshift__`, `__lt__`, `__matmul__`, `__missing__`, `__mod__`, `__mul__`, `__ne__`, `__neg__`, `__new__`, `__next__`, `__or__`, `__pos__`, `__pow__`, `__prepare__`, `__radd__`, `__rand__`, `__rdivmod__`, `__repr__`, `__reversed__`, `__rfloordiv__`, `__rlshift__`, `__rmatmul__`, `__rmod__`, `__rmul__`, `__ror__`, `__round__`, `__rpow__`, `__rrshift__`, `__rshift__`, `__rsub__`, `__rtruediv__`, `__rxor__`, `__set__`, `__setattr__`, `__setitem__`, `__str__`, `__sub__`, `__subclasscheck__`, `__truediv__`, `__xor__`), NameFunctionMagic, nil},
},
"magicvars": {
{Words(``, `\b`, `__bases__`, `__class__`, `__closure__`, `__code__`, `__defaults__`, `__dict__`, `__doc__`, `__file__`, `__func__`, `__globals__`, `__metaclass__`, `__module__`, `__mro__`, `__name__`, `__self__`, `__slots__`, `__weakref__`), NameVariableMagic, nil},
{Words(``, `\b`, `__annotations__`, `__bases__`, `__class__`, `__closure__`, `__code__`, `__defaults__`, `__dict__`, `__doc__`, `__file__`, `__func__`, `__globals__`, `__kwdefaults__`, `__module__`, `__mro__`, `__name__`, `__objclass__`, `__qualname__`, `__self__`, `__slots__`, `__weakref__`), NameVariableMagic, nil},
},
"numbers": {
{`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?`, LiteralNumberFloat, nil},
{`\d+[eE][+-]?[0-9]+j?`, LiteralNumberFloat, nil},
{`0[0-7]+j?`, LiteralNumberOct, nil},
{`0[bB][01]+`, LiteralNumberBin, nil},
{`0[xX][a-fA-F0-9_]+`, LiteralNumberHex, nil},
{`\d+L`, LiteralNumberIntegerLong, nil},
{`[\d_]+j?`, LiteralNumberInteger, nil},
},
"backtick": {
{"`.*?`", LiteralStringBacktick, nil},
{`(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)([eE][+-]?\d(?:_?\d)*)?`, LiteralNumberFloat, nil},
{`\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?`, LiteralNumberFloat, nil},
{`0[oO](?:_?[0-7])+`, LiteralNumberOct, nil},
{`0[bB](?:_?[01])+`, LiteralNumberBin, nil},
{`0[xX](?:_?[a-fA-F0-9])+`, LiteralNumberHex, nil},
{`\d(?:_?\d)*`, LiteralNumberInteger, nil},
},
"name": {
{`@[\w.]+`, NameDecorator, nil},
{`[a-zA-Z_]\w*`, Name, nil},
{`@` + pythonIdentifier, NameDecorator, nil},
{`@`, Operator, nil},
{pythonIdentifier, Name, nil},
},
"funcname": {
Include("magicfuncs"),
{`[a-zA-Z_]\w*`, NameFunction, Pop(1)},
{pythonIdentifier, NameFunction, Pop(1)},
Default(Pop(1)),
},
"classname": {
{`[a-zA-Z_]\w*`, NameClass, Pop(1)},
{pythonIdentifier, NameClass, Pop(1)},
},
"import": {
{`(?:[ \t]|\\\n)+`, Text, nil},
{`as\b`, KeywordNamespace, nil},
{`,`, Operator, nil},
{`[a-zA-Z_][\w.]*`, NameNamespace, nil},
{`(\s+)(as)(\s+)`, ByGroups(Text, Keyword, Text), nil},
{`\.`, NameNamespace, nil},
{pythonIdentifier, NameNamespace, nil},
{`(\s*)(,)(\s*)`, ByGroups(Text, Operator, Text), nil},
Default(Pop(1)),
},
"fromimport": {
{`(?:[ \t]|\\\n)+`, Text, nil},
{`import\b`, KeywordNamespace, Pop(1)},
{`(\s+)(import)\b`, ByGroups(Text, KeywordNamespace), Pop(1)},
{`\.`, NameNamespace, nil},
{`None\b`, NameBuiltinPseudo, Pop(1)},
{`[a-zA-Z_.][\w.]*`, NameNamespace, nil},
{pythonIdentifier, NameNamespace, nil},
Default(Pop(1)),
},
"rfstringescape": {
{`\{\{`, LiteralStringEscape, nil},
{`\}\}`, LiteralStringEscape, nil},
},
"fstringescape": {
Include("rfstringescape"),
Include("stringescape"),
},
"stringescape": {
{`\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})`, LiteralStringEscape, nil},
},
"strings-single": {
{`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil},
{`[^\\\'"%\n]+`, LiteralStringSingle, nil},
"fstrings-single": {
{`\}`, LiteralStringInterpol, nil},
{`\{`, LiteralStringInterpol, Push("expr-inside-fstring")},
{`[^\\\'"{}\n]+`, LiteralStringSingle, nil},
{`[\'"\\]`, LiteralStringSingle, nil},
{`%`, LiteralStringSingle, nil},
},
"fstrings-double": {
{`\}`, LiteralStringInterpol, nil},
{`\{`, LiteralStringInterpol, Push("expr-inside-fstring")},
{`[^\\\'"{}\n]+`, LiteralStringDouble, nil},
{`[\'"\\]`, LiteralStringDouble, nil},
},
"strings-single": {
{`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsaux%]`, LiteralStringInterpol, nil},
{`\{((\w+)((\.\w+)|(\[[^\]]+\]))*)?(\![sra])?(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil},
{`[^\\\'"%{\n]+`, LiteralStringSingle, nil},
{`[\'"\\]`, LiteralStringSingle, nil},
{`%|(\{{1,2})`, LiteralStringSingle, nil},
},
"strings-double": {
{`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil},
{`[^\\\'"%\n]+`, LiteralStringDouble, nil},
{`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsaux%]`, LiteralStringInterpol, nil},
{`\{((\w+)((\.\w+)|(\[[^\]]+\]))*)?(\![sra])?(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil},
{`[^\\\'"%{\n]+`, LiteralStringDouble, nil},
{`[\'"\\]`, LiteralStringDouble, nil},
{`%`, LiteralStringDouble, nil},
{`%|(\{{1,2})`, LiteralStringDouble, nil},
},
"dqf": {
{`"`, LiteralStringDouble, Pop(1)},
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
Include("fstrings-double"),
},
"sqf": {
{`'`, LiteralStringSingle, Pop(1)},
{`\\\\|\\'|\\\n`, LiteralStringEscape, nil},
Include("fstrings-single"),
},
"dqs": {
{`"`, LiteralStringDouble, Pop(1)},
@ -127,6 +187,16 @@ func pythonRules() Rules {
{`\\\\|\\'|\\\n`, LiteralStringEscape, nil},
Include("strings-single"),
},
"tdqf": {
{`"""`, LiteralStringDouble, Pop(1)},
Include("fstrings-double"),
{`\n`, LiteralStringDouble, nil},
},
"tsqf": {
{`'''`, LiteralStringSingle, Pop(1)},
Include("fstrings-single"),
{`\n`, LiteralStringSingle, nil},
},
"tdqs": {
{`"""`, LiteralStringDouble, Pop(1)},
Include("strings-double"),

141
vendor/github.com/alecthomas/chroma/lexers/p/python2.go generated vendored Normal file
View File

@ -0,0 +1,141 @@
package p
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Python2 lexer.
var Python2 = internal.Register(MustNewLazyLexer(
&Config{
Name: "Python 2",
Aliases: []string{"python2", "py2"},
Filenames: []string{},
MimeTypes: []string{"text/x-python2", "application/x-python2"},
},
python2Rules,
))
func python2Rules() Rules {
return Rules{
"root": {
{`\n`, Text, nil},
{`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil},
{`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil},
{`[^\S\n]+`, Text, nil},
{`\A#!.+$`, CommentHashbang, nil},
{`#.*$`, CommentSingle, nil},
{`[]{}:(),;[]`, Punctuation, nil},
{`\\\n`, Text, nil},
{`\\`, Text, nil},
{`(in|is|and|or|not)\b`, OperatorWord, nil},
{`!=|==|<<|>>|[-~+/*%=<>&^|.]`, Operator, nil},
Include("keywords"),
{`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")},
{`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")},
{`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")},
{`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")},
Include("builtins"),
Include("magicfuncs"),
Include("magicvars"),
Include("backtick"),
{`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")},
{`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")},
{`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")},
{`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")},
{`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")},
{`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")},
{`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")},
{`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")},
Include("name"),
Include("numbers"),
},
"keywords": {
{Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil},
},
"builtins": {
{Words(`(?<!\.)`, `\b`, `__import__`, `abs`, `all`, `any`, `apply`, `basestring`, `bin`, `bool`, `buffer`, `bytearray`, `bytes`, `callable`, `chr`, `classmethod`, `cmp`, `coerce`, `compile`, `complex`, `delattr`, `dict`, `dir`, `divmod`, `enumerate`, `eval`, `execfile`, `exit`, `file`, `filter`, `float`, `frozenset`, `getattr`, `globals`, `hasattr`, `hash`, `hex`, `id`, `input`, `int`, `intern`, `isinstance`, `issubclass`, `iter`, `len`, `list`, `locals`, `long`, `map`, `max`, `min`, `next`, `object`, `oct`, `open`, `ord`, `pow`, `property`, `range`, `raw_input`, `reduce`, `reload`, `repr`, `reversed`, `round`, `set`, `setattr`, `slice`, `sorted`, `staticmethod`, `str`, `sum`, `super`, `tuple`, `type`, `unichr`, `unicode`, `vars`, `xrange`, `zip`), NameBuiltin, nil},
{`(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls)\b`, NameBuiltinPseudo, nil},
{Words(`(?<!\.)`, `\b`, `ArithmeticError`, `AssertionError`, `AttributeError`, `BaseException`, `DeprecationWarning`, `EOFError`, `EnvironmentError`, `Exception`, `FloatingPointError`, `FutureWarning`, `GeneratorExit`, `IOError`, `ImportError`, `ImportWarning`, `IndentationError`, `IndexError`, `KeyError`, `KeyboardInterrupt`, `LookupError`, `MemoryError`, `NameError`, `NotImplementedError`, `OSError`, `OverflowError`, `OverflowWarning`, `PendingDeprecationWarning`, `ReferenceError`, `RuntimeError`, `RuntimeWarning`, `StandardError`, `StopIteration`, `SyntaxError`, `SyntaxWarning`, `SystemError`, `SystemExit`, `TabError`, `TypeError`, `UnboundLocalError`, `UnicodeDecodeError`, `UnicodeEncodeError`, `UnicodeError`, `UnicodeTranslateError`, `UnicodeWarning`, `UserWarning`, `ValueError`, `VMSError`, `Warning`, `WindowsError`, `ZeroDivisionError`), NameException, nil},
},
"magicfuncs": {
{Words(``, `\b`, `__abs__`, `__add__`, `__and__`, `__call__`, `__cmp__`, `__coerce__`, `__complex__`, `__contains__`, `__del__`, `__delattr__`, `__delete__`, `__delitem__`, `__delslice__`, `__div__`, `__divmod__`, `__enter__`, `__eq__`, `__exit__`, `__float__`, `__floordiv__`, `__ge__`, `__get__`, `__getattr__`, `__getattribute__`, `__getitem__`, `__getslice__`, `__gt__`, `__hash__`, `__hex__`, `__iadd__`, `__iand__`, `__idiv__`, `__ifloordiv__`, `__ilshift__`, `__imod__`, `__imul__`, `__index__`, `__init__`, `__instancecheck__`, `__int__`, `__invert__`, `__iop__`, `__ior__`, `__ipow__`, `__irshift__`, `__isub__`, `__iter__`, `__itruediv__`, `__ixor__`, `__le__`, `__len__`, `__long__`, `__lshift__`, `__lt__`, `__missing__`, `__mod__`, `__mul__`, `__ne__`, `__neg__`, `__new__`, `__nonzero__`, `__oct__`, `__op__`, `__or__`, `__pos__`, `__pow__`, `__radd__`, `__rand__`, `__rcmp__`, `__rdiv__`, `__rdivmod__`, `__repr__`, `__reversed__`, `__rfloordiv__`, `__rlshift__`, `__rmod__`, `__rmul__`, `__rop__`, `__ror__`, `__rpow__`, `__rrshift__`, `__rshift__`, `__rsub__`, `__rtruediv__`, `__rxor__`, `__set__`, `__setattr__`, `__setitem__`, `__setslice__`, `__str__`, `__sub__`, `__subclasscheck__`, `__truediv__`, `__unicode__`, `__xor__`), NameFunctionMagic, nil},
},
"magicvars": {
{Words(``, `\b`, `__bases__`, `__class__`, `__closure__`, `__code__`, `__defaults__`, `__dict__`, `__doc__`, `__file__`, `__func__`, `__globals__`, `__metaclass__`, `__module__`, `__mro__`, `__name__`, `__self__`, `__slots__`, `__weakref__`), NameVariableMagic, nil},
},
"numbers": {
{`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?`, LiteralNumberFloat, nil},
{`\d+[eE][+-]?[0-9]+j?`, LiteralNumberFloat, nil},
{`0[0-7]+j?`, LiteralNumberOct, nil},
{`0[bB][01]+`, LiteralNumberBin, nil},
{`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil},
{`\d+L`, LiteralNumberIntegerLong, nil},
{`\d+j?`, LiteralNumberInteger, nil},
},
"backtick": {
{"`.*?`", LiteralStringBacktick, nil},
},
"name": {
{`@[\w.]+`, NameDecorator, nil},
{`[a-zA-Z_]\w*`, Name, nil},
},
"funcname": {
Include("magicfuncs"),
{`[a-zA-Z_]\w*`, NameFunction, Pop(1)},
Default(Pop(1)),
},
"classname": {
{`[a-zA-Z_]\w*`, NameClass, Pop(1)},
},
"import": {
{`(?:[ \t]|\\\n)+`, Text, nil},
{`as\b`, KeywordNamespace, nil},
{`,`, Operator, nil},
{`[a-zA-Z_][\w.]*`, NameNamespace, nil},
Default(Pop(1)),
},
"fromimport": {
{`(?:[ \t]|\\\n)+`, Text, nil},
{`import\b`, KeywordNamespace, Pop(1)},
{`None\b`, NameBuiltinPseudo, Pop(1)},
{`[a-zA-Z_.][\w.]*`, NameNamespace, nil},
Default(Pop(1)),
},
"stringescape": {
{`\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})`, LiteralStringEscape, nil},
},
"strings-single": {
{`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil},
{`[^\\\'"%\n]+`, LiteralStringSingle, nil},
{`[\'"\\]`, LiteralStringSingle, nil},
{`%`, LiteralStringSingle, nil},
},
"strings-double": {
{`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil},
{`[^\\\'"%\n]+`, LiteralStringDouble, nil},
{`[\'"\\]`, LiteralStringDouble, nil},
{`%`, LiteralStringDouble, nil},
},
"dqs": {
{`"`, LiteralStringDouble, Pop(1)},
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
Include("strings-double"),
},
"sqs": {
{`'`, LiteralStringSingle, Pop(1)},
{`\\\\|\\'|\\\n`, LiteralStringEscape, nil},
Include("strings-single"),
},
"tdqs": {
{`"""`, LiteralStringDouble, Pop(1)},
Include("strings-double"),
{`\n`, LiteralStringDouble, nil},
},
"tsqs": {
{`'''`, LiteralStringSingle, Pop(1)},
Include("strings-single"),
{`\n`, LiteralStringSingle, nil},
},
}
}

File diff suppressed because one or more lines are too long

1721
vendor/github.com/alecthomas/chroma/lexers/r/raku.go generated vendored Normal file

File diff suppressed because it is too large

View File

@ -63,7 +63,7 @@ func restructuredtextRules() Rules {
}
}
func rstCodeBlock(groups []string, lexer Lexer) Iterator {
func rstCodeBlock(groups []string, state *LexerState) Iterator {
iterators := []Iterator{}
tokens := []Token{
{Punctuation, groups[1]},
@ -75,7 +75,7 @@ func rstCodeBlock(groups []string, lexer Lexer) Iterator {
{Text, groups[7]},
}
code := strings.Join(groups[8:], "")
lexer = internal.Get(groups[6])
lexer := internal.Get(groups[6])
if lexer == nil {
tokens = append(tokens, Token{String, code})
iterators = append(iterators, Literator(tokens...))

View File

@ -9,9 +9,9 @@ import (
var Rust = internal.Register(MustNewLazyLexer(
&Config{
Name: "Rust",
Aliases: []string{"rust"},
Aliases: []string{"rust", "rs"},
Filenames: []string{"*.rs", "*.rs.in"},
MimeTypes: []string{"text/rust"},
MimeTypes: []string{"text/rust", "text/x-rust"},
EnsureNL: true,
},
rustRules,
@ -35,20 +35,22 @@ func rustRules() Rules {
{`r#*"(?:\\.|[^\\;])*"#*`, LiteralString, nil},
{`"(?:\\.|[^\\"])*"`, LiteralString, nil},
{`\$([a-zA-Z_]\w*|\(,?|\),?|,?)`, CommentPreproc, nil},
{Words(``, `\b`, `as`, `async`, `await`, `const`, `crate`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil},
{Words(``, `\b`, `abstract`, `become`, `box`, `do`, `final`, `macro`, `override`, `priv`, `try`, `typeof`, `unsized`, `virtual`, `yield`), KeywordReserved, nil},
{Words(``, `\b`, `as`, `async`, `await`, `box`, `const`, `crate`, `dyn`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil},
{Words(``, `\b`, `abstract`, `become`, `do`, `final`, `macro`, `override`, `priv`, `typeof`, `try`, `unsized`, `virtual`, `yield`), KeywordReserved, nil},
{`(true|false)\b`, KeywordConstant, nil},
{`self\b`, NameBuiltinPseudo, nil},
{`mod\b`, Keyword, Push("modname")},
{`let\b`, KeywordDeclaration, nil},
{`fn\b`, Keyword, Push("funcname")},
{`(struct|enum|type|union)\b`, Keyword, Push("typename")},
{`(default)(\s+)(type|fn)\b`, ByGroups(Keyword, Text, Keyword), nil},
{Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `u128`, `i8`, `i16`, `i32`, `i64`, `i128`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil},
{`self\b`, NameBuiltinPseudo, nil},
{Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `SliceConcatExt`, `String`, `ToString`, `Vec`), NameBuiltin, nil},
{Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `u128`, `i8`, `i16`, `i32`, `i64`, `i128`, `usize`, `isize`, `f32`, `f64`, `char`, `str`, `bool`), KeywordType, nil},
{`[sS]elf\b`, NameBuiltinPseudo, nil},
{Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Unpin`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `drop`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `String`, `ToString`, `Vec`), NameBuiltin, nil},
{Words(``, `!`, `asm`, `assert`, `assert_eq`, `assert_ne`, `cfg`, `column`, `compile_error`, `concat`, `concat_idents`, `dbg`, `debug_assert`, `debug_assert_eq`, `debug_assert_ne`, `env`, `eprint`, `eprintln`, `file`, `format`, `format_args`, `format_args_nl`, `global_asm`, `include`, `include_bytes`, `include_str`, `is_aarch64_feature_detected`, `is_arm_feature_detected`, `is_mips64_feature_detected`, `is_mips_feature_detected`, `is_powerpc64_feature_detected`, `is_powerpc_feature_detected`, `is_x86_feature_detected`, `line`, `llvm_asm`, `log_syntax`, `macro_rules`, `matches`, `module_path`, `option_env`, `panic`, `print`, `println`, `stringify`, `thread_local`, `todo`, `trace_macros`, `unimplemented`, `unreachable`, `vec`, `write`, `writeln`), NameFunctionMagic, nil},
{`::\b`, Text, nil},
{`(?::|->)`, Text, Push("typename")},
{`(break|continue)(\s*)(\'[A-Za-z_]\w*)?`, ByGroups(Keyword, TextWhitespace, NameLabel), nil},
{`(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?`, ByGroups(Keyword, TextWhitespace, NameLabel), nil},
{`'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}|.)'`, LiteralStringChar, nil},
{`b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0|\\u\{[0-9a-fA-F]{1,6}\}|.)'`, LiteralStringChar, nil},
{`0b[01_]+`, LiteralNumberBin, Push("number_lit")},
@ -57,15 +59,15 @@ func rustRules() Rules {
{`[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)`, LiteralNumberFloat, Push("number_lit")},
{`[0-9][0-9_]*`, LiteralNumberInteger, Push("number_lit")},
{`b"`, LiteralString, Push("bytestring")},
{`b?r(#*)".*?"\1`, LiteralString, nil},
{`'static`, NameBuiltin, nil},
{`'[a-zA-Z_]\w*`, NameAttribute, nil},
{`(?s)b?r(#*)".*?"\1`, LiteralString, nil},
{`'`, Operator, Push("lifetime")},
{`\.\.=?`, Operator, nil},
{`[{}()\[\],.;]`, Punctuation, nil},
{`[+\-*/%&|<>^!~@=:?]`, Operator, nil},
{`(r#)?[a-zA-Z_]\w*`, Name, nil},
{`r#[a-zA-Z_]\w*`, Name, nil},
{`#!?\[`, CommentPreproc, Push("attribute[")},
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, TextWhitespace, Punctuation), Push("macro{")},
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, Punctuation), Push("macro(")},
{`#`, Text, nil},
},
"comment": {
{`[^*/]+`, CommentMultiline, nil},
@ -92,11 +94,17 @@ func rustRules() Rules {
"typename": {
{`\s+`, Text, nil},
{`&`, KeywordPseudo, nil},
{Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `SliceConcatExt`, `String`, `ToString`, `Vec`), NameBuiltin, nil},
{Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `i8`, `i16`, `i32`, `i64`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil},
{`'`, Operator, Push("lifetime")},
{Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Unpin`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `drop`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `String`, `ToString`, `Vec`), NameBuiltin, nil},
{Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `u128`, `i8`, `i16`, `i32`, `i64`, `i128`, `usize`, `isize`, `f32`, `f64`, `char`, `str`, `bool`), KeywordType, nil},
{`[a-zA-Z_]\w*`, NameClass, Pop(1)},
Default(Pop(1)),
},
"lifetime": {
{`(static|_)`, NameBuiltin, nil},
{`[a-zA-Z_]+\w*`, NameAttribute, nil},
Default(Pop(1)),
},
"number_lit": {
{`[ui](8|16|32|64|size)`, Keyword, Pop(1)},
{`f(32|64)`, Keyword, Pop(1)},
@ -112,28 +120,14 @@ func rustRules() Rules {
{`\\x[89a-fA-F][0-9a-fA-F]`, LiteralStringEscape, nil},
Include("string"),
},
"macro{": {
{`\{`, Operator, Push()},
{`\}`, Operator, Pop(1)},
},
"macro(": {
{`\(`, Operator, Push()},
{`\)`, Operator, Pop(1)},
},
"attribute_common": {
{`"`, LiteralString, Push("string")},
{`\[`, CommentPreproc, Push("attribute[")},
{`\(`, CommentPreproc, Push("attribute(")},
},
"attribute[": {
Include("attribute_common"),
{`\];?`, CommentPreproc, Pop(1)},
{`[^"\]]+`, CommentPreproc, nil},
},
"attribute(": {
Include("attribute_common"),
{`\);?`, CommentPreproc, Pop(1)},
{`[^")]+`, CommentPreproc, nil},
{`\]`, CommentPreproc, Pop(1)},
{`[^"\]\[]+`, CommentPreproc, nil},
},
}
}

View File

@ -22,8 +22,8 @@ var Scala = internal.Register(MustNewLazyLexer(
func scalaRules() Rules {
var (
scalaOp = "[-~\\^\\*!%&\\\\<>\\|+=:/?@\xa6-\xa7\xa9\xac\xae\xb0-\xb1\xb6\xd7\xf7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+"
scalaUpper = "[A-Z\\$_\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\
ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]"
scalaLetter = `[a-zA-Z\\$_ªµºÀ-ÖØ-öø-ʯͰ-ͳͶ-ͷͻ-ͽΆΈ-ϵϷ-ҁҊ-Ֆա-ևא-ײء-ؿف-يٮ-ٯٱ-ۓەۮ-ۯۺ-ۼۿܐܒ-ܯݍ-ޥޱߊ-ߪऄ-हऽॐक़-ॡॲ-ॿঅ-হঽৎড়-ৡৰ-ৱਅ-ਹਖ਼-ਫ਼ੲ-ੴઅ-હઽૐ-ૡଅ-ହଽଡ଼-ୡୱஃ-ஹௐఅ-ఽౘ-ౡಅ-ಹಽೞ-ೡഅ-ഽൠ-ൡൺ-ൿඅ-ෆก-ะา-ำเ-ๅກ-ະາ-ຳຽ-ໄໜ-ༀཀ-ཬྈ-ྋက-ဪဿၐ-ၕၚ-ၝၡၥ-ၦၮ-ၰၵ-ႁႎႠ-ჺᄀ-ፚᎀ-ᎏᎠ-ᙬᙯ-ᙶᚁ-ᚚᚠ-ᛪᛮ-ᜑᜠ-ᜱᝀ-ᝑᝠ-ᝰក-ឳៜᠠ-ᡂᡄ-ᢨᢪ-ᤜᥐ-ᦩᧁ-ᧇᨀ-ᨖᬅ-ᬳᭅ-ᭋᮃ-ᮠᮮ-ᮯᰀ-ᰣᱍ-ᱏᱚ-ᱷᴀ-ᴫᵢ-ᵷᵹ-ᶚḀ-ᾼιῂ-ῌῐ-Ίῠ-Ῥῲ-ῼⁱⁿℂℇℊ--ℝℤΩℨK--ℹℼ-ℿⅅ-ⅉⅎⅠ-ↈⰀ-ⱼⲀ-ⳤⴀ-ⵥⶀ-ⷞ〆-〇〡-〩〸-〺〼ぁ-ゖゟァ-ヺヿ-ㆎㆠ-ㆷㇰ-ㇿ㐀-䶵一-ꀔꀖ-ꒌꔀ-ꘋꘐ-ꘟꘪ-ꙮꚀ-ꚗꜢ-ꝯꝱ-ꞇꞋ-ꠁꠃ-ꠅꠇ-ꠊꠌ-ꠢꡀ-ꡳꢂ-ꢳꤊ-ꤥꤰ-ꥆꨀ-ꨨꩀ-ꩂꩄ-ꩋ가-힣豈-יִײַ-ﬨשׁ-ﴽﵐ-ﷻﹰ-ﻼA--zヲ-ッア-ンᅠ-ᅵ]`
scalaUpper = `[\\$_\p{Lu}]`
scalaLetter = `[\\$_\p{L}]`
scalaIDRest = fmt.Sprintf(`%s(?:%s|[0-9])*(?:(?<=_)%s)?`, scalaLetter, scalaLetter, scalaOp)
)

View File

@ -30,7 +30,7 @@ func schemeLangRules() Rules {
{`-?\d+`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`'[\w!$%&*+,/:<=>?@^~|-]+`, LiteralStringSymbol, nil},
{`#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)`, LiteralStringChar, nil},
{`#\\(alarm|backspace|delete|esc|linefeed|newline|page|return|space|tab|vtab|x[0-9a-zA-Z]{1,5}|.)`, LiteralStringChar, nil},
{`(#t|#f)`, NameConstant, nil},
{"('|#|`|,@|,|\\.)", Operator, nil},
{`(lambda |define |if |else |cond |and |or |case |let |let\* |letrec |begin |do |delay |set\! |\=\> |quote |quasiquote |unquote |unquote\-splicing |define\-syntax |let\-syntax |letrec\-syntax |syntax\-rules )`, Keyword, nil},

File diff suppressed because one or more lines are too long

vendor/github.com/alecthomas/chroma/lexers/s/sieve.go generated vendored Normal file
View File

@ -0,0 +1,37 @@
package s
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Sieve lexer.
var Sieve = internal.Register(MustNewLazyLexer(
&Config{
Name: "Sieve",
Aliases: []string{"sieve"},
Filenames: []string{"*.siv", "*.sieve"},
MimeTypes: []string{},
},
func() Rules {
return Rules{
"root": {
{`\s+`, Text, nil},
{`[();,{}\[\]]`, Punctuation, nil},
{`(?i)require`, KeywordNamespace, nil},
{`(?i)(:)(addresses|all|contains|content|create|copy|comparator|count|days|detail|domain|fcc|flags|from|handle|importance|is|localpart|length|lowerfirst|lower|matches|message|mime|options|over|percent|quotewildcard|raw|regex|specialuse|subject|text|under|upperfirst|upper|value)`, ByGroups(NameTag, NameTag), nil},
{`(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|ereject|exists|false|fileinto|if|hasflag|header|keep|notify_method_capability|notify|not|redirect|reject|removeflag|setflag|size|spamtest|stop|string|true|vacation|virustest)`, NameBuiltin, nil},
{`(?i)set`, KeywordDeclaration, nil},
{`([0-9.]+)([kmgKMG])?`, ByGroups(LiteralNumber, LiteralNumber), nil},
{`#.*$`, CommentSingle, nil},
{`/\*.*\*/`, CommentMultiline, nil},
{`"[^"]*?"`, LiteralString, nil},
{`text:`, NameTag, Push("text")},
},
"text": {
{`[^.].*?\n`, LiteralString, nil},
{`^\.`, Punctuation, Pop(1)},
},
}
},
))
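Once registered like this, the new lexer is reachable by name or alias through the public lexers package. A hedged usage sketch (the program below is illustrative and not part of this diff):

// Hypothetical usage of the newly vendored Sieve lexer.
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	lexer := lexers.Get("sieve") // matches the "sieve" alias registered above
	it, err := lexer.Tokenise(nil, "require \"fileinto\";\n")
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%s %q\n", tok.Type, tok.Value)
	}
}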

View File

@ -39,17 +39,16 @@ func solidityRules() Rules {
{`/[*][\w\W]*`, CommentMultiline, nil},
},
"keywords-other": {
{Words(``, `\b`, `for`, `in`, `while`, `do`, `break`, `return`, `returns`, `continue`, `if`, `else`, `throw`, `new`, `delete`), Keyword, nil},
{Words(``, `\b`, `for`, `in`, `while`, `do`, `break`, `return`, `returns`, `continue`, `if`, `else`, `try`, `catch`, `throw`, `_`, `new`, `delete`, `is`, `as`, `from`, `memory`, `storage`), Keyword, nil},
{`assembly\b`, Keyword, Push("assembly")},
{Words(``, `\b`, `contract`, `interface`, `enum`, `event`, `function`, `library`, `mapping`, `modifier`, `struct`, `var`), KeywordDeclaration, nil},
{`(contract|interface|enum|event|struct)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameClass), nil},
{`(function|modifier)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameFunction), nil},
{Words(``, `\b`, `contract`, `interface`, `enum`, `event`, `constructor`, `function`, `library`, `mapping`, `modifier`, `struct`, `var`), KeywordDeclaration, nil},
{Words(``, `\b`, `abstract`, `external`, `internal`, `private`, `public`), Keyword, nil},
{Words(``, `\b`, `anonymous`, `constant`, `immutable`, `indexed`, `override`, `payable`, `pure`, `view`, `virtual`), Keyword, nil},
{`(import|using)\b`, KeywordNamespace, nil},
{`pragma (solidity|experimental)\b`, KeywordReserved, nil},
{`(_|as|constant|default|from|is)\b`, KeywordReserved, nil},
{`payable\b`, KeywordReserved, nil},
{`(memory|storage)\b`, KeywordReserved, nil},
{`(external|internal|private|public)\b`, KeywordReserved, nil},
{`(anonymous|indexed)\b`, KeywordReserved, nil},
{`(abstract|pure|static|view)\b`, KeywordReserved, nil},
{`pragma (solidity|experimental)\b`, Keyword, nil},
{Words(``, `\b`, `after`, `alias`, `apply`, `auto`, `case`, `copyof`, `default`, `define`, `final`, `implements`, `inline`, `let`, `macro`, `match`, `mutable`, `null`, `of`, `partial`, `promise`, `reference`, `relocatable`, `sealed`, `sizeof`, `static`, `supports`, `switch`, `typedef`, `typeof`, `unchecked`), KeywordReserved, nil},
{`(true|false)\b`, KeywordConstant, nil},
{`(wei|finney|szabo|ether)\b`, KeywordConstant, nil},
{`(seconds|minutes|hours|days|weeks|years)\b`, KeywordConstant, nil},
@ -99,15 +98,20 @@ func solidityRules() Rules {
{`\+\+|--|\*\*|\?|:|~|&&|\|\||=>|==?|!=?|(<<|>>>?|[-<>+*%&|^/])=?`, Operator, nil},
{`[{(\[;,]`, Punctuation, nil},
{`[})\].]`, Punctuation, nil},
{`(block|msg|now|this|super|tx)\b`, NameBuiltin, nil},
{`(sender|origin)\b`, NameBuiltin, nil},
{`(gas|value)\b`, NameBuiltin, nil},
{`(abi|block|msg|tx)\b`, NameBuiltin, nil},
{`(?!abi\.)(decode|encode|encodePacked|encodeWithSelector|encodeWithSignature|encodeWithSelector)\b`, NameBuiltin, nil},
{`(?!block\.)(chainid|coinbase|difficulty|gaslimit|number|timestamp)\b`, NameBuiltin, nil},
{`(?!msg\.)(data|gas|sender|value)\b`, NameBuiltin, nil},
{`(?!tx\.)(gasprice|origin)\b`, NameBuiltin, nil},
{`(type)(\()([a-zA-Z_]\w*)(\))`, ByGroups(NameBuiltin, Punctuation, NameClass, Punctuation), nil},
{`(?!type\([a-zA-Z_]\w*\)\.)(creationCode|interfaceId|max|min|name|runtimeCode)\b`, NameBuiltin, nil},
{`(now|this|super|gasleft)\b`, NameBuiltin, nil},
{`(selfdestruct|suicide)\b`, NameBuiltin, nil},
{`(balance|send|transfer)\b`, NameBuiltin, nil},
{`(?!0x[0-9a-fA-F]+\.)(balance|code|codehash|send|transfer)\b`, NameBuiltin, nil},
{`(assert|revert|require)\b`, NameBuiltin, nil},
{`(call|callcode|delegatecall)\b`, NameBuiltin, nil},
{`selector\b`, NameBuiltin, nil},
{`(addmod|ecrecover|keccak256|mulmod|ripemd160|sha256|sha3)\b`, NameFunction, nil},
{`(addmod|blockhash|ecrecover|keccak256|mulmod|ripemd160|sha256|sha3)\b`, NameBuiltin, nil},
{`[a-zA-Z_]\w*`, Name, nil},
},
}

View File

@ -23,9 +23,9 @@ func sparqlRules() Rules {
{`((?i)select|construct|describe|ask|where|filter|group\s+by|minus|distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|offset|bindings|load|clear|drop|create|add|move|copy|insert\s+data|delete\s+data|delete\s+where|delete|insert|using\s+named|using|graph|default|named|all|optional|service|silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b`, Keyword, nil},
{`(a)\b`, Keyword, nil},
{"(<(?:[^<>\"{}|^`\\\\\\x00-\\x20])*>)", NameLabel, nil},
{`(_:[0-9a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_](?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_\-0-9·̀-ͯ‿-⁀.]*[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_\-0-9·̀-ͯ‿-⁀])?)`, NameLabel, nil},
{`[?$][0-9a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_][a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_0-9·̀-ͯ‿-⁀]*`, NameVariable, nil},
{`([a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>](?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_\-0-9·̀-ͯ‿-⁀.]*[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_\-0-9·̀-ͯ‿-⁀])?)?(\:)((?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_:0-9]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))(?:(?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_\-0-9·̀-ͯ‿-⁀.:]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))*(?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-<2D>_\-0-9·̀-ͯ‿-⁀:]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%])))?)?`, ByGroups(NameNamespace, Punctuation, NameTag), nil},
{`(_:[_\p{L}\p{N}](?:[-_.\p{L}\p{N}]*[-_\p{L}\p{N}])?)`, NameLabel, nil},
{`[?$][_\p{L}\p{N}]+`, NameVariable, nil},
{`([\p{L}][-_.\p{L}\p{N}]*)?(\:)((?:[_:\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))(?:(?:[-_:.\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))*(?:[-_:\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%])))?)?`, ByGroups(NameNamespace, Punctuation, NameTag), nil},
{`((?i)str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|contains|strstarts|strends|strbefore|strafter|year|month|day|hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|count|sum|min|max|avg|sample|group_concat|separator)\b`, NameFunction, nil},
{`(true|false)`, KeywordConstant, nil},
{`[+\-]?(\d+\.\d*[eE][+-]?\d+|\.?\d+[eE][+-]?\d+)`, LiteralNumberFloat, nil},

vendor/github.com/alecthomas/chroma/lexers/s/svelte.go generated vendored Normal file
View File

@ -0,0 +1,73 @@
package s
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/h"
"github.com/alecthomas/chroma/lexers/internal"
"github.com/alecthomas/chroma/lexers/t"
)
// Svelte lexer.
var Svelte = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
&Config{
Name: "Svelte",
Aliases: []string{"svelte"},
Filenames: []string{"*.svelte"},
MimeTypes: []string{"application/x-svelte"},
DotAll: true,
},
svelteRules,
)))
func svelteRules() Rules {
return Rules{
"root": {
// Let HTML handle the comments, including comments containing script and style tags
{`<!--`, Other, Push("comment")},
{
// Highlight script and style tags based on lang attribute
// and allow attributes besides lang
`(<\s*(?:script|style).*?lang\s*=\s*['"])` +
`(.+?)(['"].*?>)` +
`(.+?)` +
`(<\s*/\s*(?:script|style)\s*>)`,
UsingByGroup(internal.Get, 2, 4, Other, Other, Other, Other, Other),
nil,
},
{
// Make sure `{` is not inside script or style tags
`(?<!<\s*(?:script|style)(?:(?!(?:script|style)\s*>).)*?)` +
`{` +
`(?!(?:(?!<\s*(?:script|style)).)*?(?:script|style)\s*>)`,
Punctuation,
Push("templates"),
},
// on:submit|preventDefault
{`(?<=\s+on:\w+(?:\|\w+)*)\|(?=\w+)`, Operator, nil},
{`.+?`, Other, nil},
},
"comment": {
{`-->`, Other, Pop(1)},
{`.+?`, Other, nil},
},
"templates": {
{`}`, Punctuation, Pop(1)},
// Let TypeScript handle strings and the curly braces inside them
{`(?<!(?<!\\)\\)(['"` + "`])" + `.*?(?<!(?<!\\)\\)\1`, Using(t.TypeScript), nil},
// If there is another opening curly brace push to templates again
{"{", Punctuation, Push("templates")},
{`@(debug|html)\b`, Keyword, nil},
{
`(#await)(\s+)(\w+)(\s+)(then|catch)(\s+)(\w+)`,
ByGroups(Keyword, Text, Using(t.TypeScript), Text,
Keyword, Text, Using(t.TypeScript),
),
nil,
},
{`(#|/)(await|each|if|key)\b`, Keyword, nil},
{`(:else)(\s+)(if)?\b`, ByGroups(Keyword, Text, Keyword), nil},
{`:(catch|then)\b`, Keyword, nil},
{`[^{}]+`, Using(t.TypeScript), nil},
},
}
}

View File

@ -7,9 +7,10 @@ import (
var SYSTEMD = internal.Register(MustNewLazyLexer(
&Config{
Name: "SYSTEMD",
Aliases: []string{"systemd"},
Filenames: []string{"*.service"},
Name: "SYSTEMD",
Aliases: []string{"systemd"},
// Suspects: man systemd.index | grep -E 'systemd\..*configuration'
Filenames: []string{"*.automount", "*.device", "*.dnssd", "*.link", "*.mount", "*.netdev", "*.network", "*.path", "*.scope", "*.service", "*.slice", "*.socket", "*.swap", "*.target", "*.timer"},
MimeTypes: []string{"text/plain"},
},
systemdRules,

View File

@ -27,7 +27,7 @@ func tomlRules() Rules {
{`"(\\\\|\\"|[^"])*"`, StringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, StringSingle, nil},
{`[.,=\[\]{}]`, Punctuation, nil},
{`[^\W\d]\w*`, NameOther, nil},
{`[A-Za-z0-9_-]+`, NameOther, nil},
},
}
}

File diff suppressed because one or more lines are too long

View File

@ -10,7 +10,7 @@ var XML = internal.Register(MustNewLazyLexer(
&Config{
Name: "XML",
Aliases: []string{"xml"},
Filenames: []string{"*.xml", "*.xsl", "*.rss", "*.xslt", "*.xsd", "*.wsdl", "*.wsf", "*.svg"},
Filenames: []string{"*.xml", "*.xsl", "*.rss", "*.xslt", "*.xsd", "*.wsdl", "*.wsf", "*.svg", "*.csproj", "*.vcxproj", "*.fsproj"},
MimeTypes: []string{"text/xml", "application/xml", "image/svg+xml", "application/rss+xml", "application/atom+xml"},
DotAll: true,
},

vendor/github.com/alecthomas/chroma/lexers/z/zed.go generated vendored Normal file
View File

@ -0,0 +1,53 @@
package z
import (
"strings"
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Zed lexer.
var Zed = internal.Register(MustNewLazyLexer(
&Config{
Name: "Zed",
Aliases: []string{"zed"},
Filenames: []string{"*.zed"},
MimeTypes: []string{"text/zed"},
},
zedRules,
).SetAnalyser(func(text string) float32 {
if strings.Contains(text, "definition ") && strings.Contains(text, "relation ") && strings.Contains(text, "permission ") {
return 0.9
}
if strings.Contains(text, "definition ") {
return 0.5
}
if strings.Contains(text, "relation ") {
return 0.5
}
if strings.Contains(text, "permission ") {
return 0.25
}
return 0.0
}))
func zedRules() Rules {
return Rules{
"root": {
{`\n`, TextWhitespace, nil},
{`\s+`, TextWhitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil},
{`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil},
{Words(``, `\b`, `definition`), KeywordType, nil},
{Words(``, `\b`, `relation`), KeywordNamespace, nil},
{Words(``, `\b`, `permission`), KeywordDeclaration, nil},
{`[a-zA-Z_]\w*/`, NameNamespace, nil},
{`[a-zA-Z_]\w*`, Name, nil},
{`#[a-zA-Z_]\w*`, NameVariable, nil},
{`[+%=><|^!?/\-*&~:]`, Operator, nil},
{`[{}()\[\],.;]`, Punctuation, nil},
},
}
}
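The analyser attached via SetAnalyser feeds chroma's content-based lexer detection. A brief sketch of how it would typically be exercised (assumes the public lexers package and fmt; not part of this diff):

// Hypothetical content sniffing: lexers.Analyse asks every registered
// lexer for a score and returns the best match, so the heuristic above
// should select Zed for text containing definition/relation/permission.
src := "definition user {}\ndefinition doc {\n  relation reader: user\n  permission read = reader\n}\n"
if lexer := lexers.Analyse(src); lexer != nil {
	fmt.Println(lexer.Config().Name) // expected: Zed
}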

View File

@ -2,9 +2,6 @@ Generated with:
g 'class.*RegexLexer' | pawk --strict -F: '"pygments.lexers.%s.%s" % (f[0].split(".")[0], f[2].split()[1].split("(")[0])' > lexers.txt
perl6:
Requires a bunch of helpers that I do not have the time to convert.
kotlin:
invalid unicode escape sequences
FIXED: Have to disable wide Unicode characters in unistring.py

View File

@ -3,7 +3,9 @@ package chroma
import (
"fmt"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"sync"
"time"
@ -22,25 +24,57 @@ type Rule struct {
// An Emitter takes group matches and returns tokens.
type Emitter interface {
// Emit tokens for the given regex groups.
Emit(groups []string, lexer Lexer) Iterator
Emit(groups []string, state *LexerState) Iterator
}
// EmitterFunc is a function that is an Emitter.
type EmitterFunc func(groups []string, lexer Lexer) Iterator
type EmitterFunc func(groups []string, state *LexerState) Iterator
// Emit tokens for groups.
func (e EmitterFunc) Emit(groups []string, lexer Lexer) Iterator { return e(groups, lexer) }
func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator {
return e(groups, state)
}
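Emitters now receive the full *LexerState rather than just the Lexer, so any custom EmitterFunc written against the old signature needs a small mechanical change. A hedged migration sketch (upperName and the strings import are illustrative, not from this diff):

// Old signature: func(groups []string, lexer Lexer) Iterator
// New signature: the emitter also sees position, rule and group state.
var upperName Emitter = EmitterFunc(func(groups []string, state *LexerState) Iterator {
	_ = state.Lexer // the former Lexer argument is still reachable via the state
	return Literator(Token{Type: Name, Value: strings.ToUpper(groups[0])})
})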
// ByGroups emits a token for each matching group in the rule's regex.
func ByGroups(emitters ...Emitter) Emitter {
return EmitterFunc(func(groups []string, lexer Lexer) Iterator {
return EmitterFunc(func(groups []string, state *LexerState) Iterator {
iterators := make([]Iterator, 0, len(groups)-1)
if len(emitters) != len(groups)-1 {
iterators = append(iterators, Error.Emit(groups, lexer))
iterators = append(iterators, Error.Emit(groups, state))
// panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters))
} else {
for i, group := range groups[1:] {
iterators = append(iterators, emitters[i].Emit([]string{group}, lexer))
if emitters[i] != nil {
iterators = append(iterators, emitters[i].Emit([]string{group}, state))
}
}
}
return Concaterator(iterators...)
})
}
// ByGroupNames emits a token for each named matching group in the rule's regex.
func ByGroupNames(emitters map[string]Emitter) Emitter {
return EmitterFunc(func(groups []string, state *LexerState) Iterator {
iterators := make([]Iterator, 0, len(state.NamedGroups)-1)
if len(state.NamedGroups)-1 == 0 {
if emitter, ok := emitters[`0`]; ok {
iterators = append(iterators, emitter.Emit(groups, state))
} else {
iterators = append(iterators, Error.Emit(groups, state))
}
} else {
ruleRegex := state.Rules[state.State][state.Rule].Regexp
for i := 1; i < len(state.NamedGroups); i++ {
groupName := ruleRegex.GroupNameFromNumber(i)
group := state.NamedGroups[groupName]
if emitter, ok := emitters[groupName]; ok {
if emitter != nil {
iterators = append(iterators, emitter.Emit([]string{group}, state))
}
} else {
iterators = append(iterators, Error.Emit([]string{group}, state))
}
}
}
return Concaterator(iterators...)
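ByGroupNames is meant to pair with named capture groups, which the regexp2 engine supports. A hypothetical rule using it (the state name, group names and token types here are illustrative only):

// Hypothetical "assignment" state: each named group gets its own emitter;
// group names missing from the map fall back to Error, per the code above.
"assignment": {
	{`(?P<key>\w+)(?P<eq>\s*=\s*)(?P<value>[^\n]+)`,
		ByGroupNames(map[string]Emitter{
			"key":   NameAttribute,
			"eq":    Operator,
			"value": LiteralString,
		}),
		nil},
},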
@ -88,7 +122,7 @@ func ByGroups(emitters ...Emitter) Emitter {
// Note: panics if the number of emitters does not equal the number of matched
// groups in the regex.
func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter {
return EmitterFunc(func(groups []string, lexer Lexer) Iterator {
return EmitterFunc(func(groups []string, state *LexerState) Iterator {
// bounds check
if len(emitters) != len(groups)-1 {
panic("UsingByGroup expects number of emitters to be the same as len(groups)-1")
@ -106,8 +140,8 @@ func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGro
if err != nil {
panic(err)
}
} else {
iterators[i] = emitters[i].Emit([]string{group}, lexer)
} else if emitters[i] != nil {
iterators[i] = emitters[i].Emit([]string{group}, state)
}
}
@ -117,7 +151,7 @@ func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGro
// Using returns an Emitter that uses a given Lexer for parsing and emitting.
func Using(lexer Lexer) Emitter {
return EmitterFunc(func(groups []string, _ Lexer) Iterator {
return EmitterFunc(func(groups []string, _ *LexerState) Iterator {
it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0])
if err != nil {
panic(err)
@ -127,9 +161,9 @@ func Using(lexer Lexer) Emitter {
}
// UsingSelf is like Using, but uses the current Lexer.
func UsingSelf(state string) Emitter {
return EmitterFunc(func(groups []string, lexer Lexer) Iterator {
it, err := lexer.Tokenise(&TokeniseOptions{State: state, Nested: true}, groups[0])
func UsingSelf(stateName string) Emitter {
return EmitterFunc(func(groups []string, state *LexerState) Iterator {
it, err := state.Lexer.Tokenise(&TokeniseOptions{State: stateName, Nested: true}, groups[0])
if err != nil {
panic(err)
}
@ -139,6 +173,9 @@ func UsingSelf(state string) Emitter {
// Words creates a regex that matches any of the given literal words.
func Words(prefix, suffix string, words ...string) string {
sort.Slice(words, func(i, j int) bool {
return len(words[j]) < len(words[i])
})
for i, word := range words {
words[i] = regexp.QuoteMeta(word)
}
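The added sort orders the words longest-first before they are joined into an alternation, so a longer keyword can no longer be shadowed by a shorter prefix. A small illustrative check (assumes fmt):

// With the descending length sort, the alternation tries "interface"
// before "in", so "interface" is no longer split into "in" + "terface".
pattern := Words(``, `\b`, `in`, `interface`)
fmt.Println(pattern) // something like: (interface|in)\b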
@ -162,10 +199,10 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
type Rules map[string][]Rule
// Rename clones the rules, then renames a rule.
func (r Rules) Rename(old, new string) Rules {
func (r Rules) Rename(oldRule, newRule string) Rules {
r = r.Clone()
r[new] = r[old]
delete(r, old)
r[newRule] = r[oldRule]
delete(r, oldRule)
return r
}
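Because Rename clones before it renames, the receiver stays usable. A quick illustrative sketch (assumes fmt):

// Hypothetical: derive a rule set with a renamed state without touching the original.
base := Rules{"root": {{`\w+`, Name, nil}}}
derived := base.Rename("root", "expr")
_, inBase := base["root"]       // still present: base was cloned, not mutated
_, inDerived := derived["expr"] // renamed copy
fmt.Println(inBase, inDerived)  // true true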
@ -202,6 +239,12 @@ func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
if config == nil {
config = &Config{}
}
for _, glob := range append(config.Filenames, config.AliasFilenames...) {
_, err := filepath.Match(glob, "")
if err != nil {
return nil, fmt.Errorf("%s: %q is not a valid glob: %w", config.Name, glob, err)
}
}
return &RegexLexer{
config: config,
compilerFunc: rulesFunc,
@ -209,8 +252,10 @@ func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
}
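The loop added to NewLazyLexer above rejects filename globs that filepath.Match cannot parse, instead of letting them silently never match. A hedged sketch of the new failure mode (the config is hypothetical):

// Hypothetical: an unterminated character class is an invalid glob.
_, err := NewLazyLexer(&Config{
	Name:      "Broken",
	Filenames: []string{"[*.broken"},
}, func() Rules { return Rules{"root": {}} })
fmt.Println(err) // e.g. Broken: "[*.broken" is not a valid glob: syntax error in pattern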
// MustNewLexer creates a new Lexer or panics.
func MustNewLexer(config *Config, rules Rules) *RegexLexer {
lexer, err := NewLexer(config, rules)
//
// Deprecated: Use MustNewLazyLexer instead.
func MustNewLexer(config *Config, rules Rules) *RegexLexer { // nolint: forbidigo
lexer, err := NewLexer(config, rules) // nolint: forbidigo
if err != nil {
panic(err)
}
@ -221,7 +266,9 @@ func MustNewLexer(config *Config, rules Rules) *RegexLexer {
//
// "rules" is a state machine transitition map. Each key is a state. Values are sets of rules
// that match input, optionally modify lexer state, and output tokens.
func NewLexer(config *Config, rules Rules) (*RegexLexer, error) {
//
// Deprecated: Use NewLazyLexer instead.
func NewLexer(config *Config, rules Rules) (*RegexLexer, error) { // nolint: forbidigo
return NewLazyLexer(config, func() Rules { return rules })
}
@ -254,6 +301,8 @@ type LexerState struct {
Rule int
// Group matches.
Groups []string
// Named Group matches.
NamedGroups map[string]string
// Custom context for mutators.
MutatorContext map[interface{}]interface{}
iteratorStack []Iterator
@ -297,7 +346,7 @@ func (l *LexerState) Iterator() Token { // nolint: gocognit
if !ok {
panic("unknown state " + l.State)
}
ruleIndex, rule, groups := matchRules(l.Text, l.Pos, selectedRule)
ruleIndex, rule, groups, namedGroups := matchRules(l.Text, l.Pos, selectedRule)
// No match.
if groups == nil {
// From Pygments :\
@ -315,6 +364,7 @@ func (l *LexerState) Iterator() Token { // nolint: gocognit
}
l.Rule = ruleIndex
l.Groups = groups
l.NamedGroups = namedGroups
l.Pos += utf8.RuneCountInString(groups[0])
if rule.Mutator != nil {
if err := rule.Mutator.Mutate(l); err != nil {
@ -322,7 +372,7 @@ func (l *LexerState) Iterator() Token { // nolint: gocognit
}
}
if rule.Type != nil {
l.iteratorStack = append(l.iteratorStack, rule.Type.Emit(l.Groups, l.Lexer))
l.iteratorStack = append(l.iteratorStack, rule.Type.Emit(l.Groups, l))
}
}
// Exhaust the IteratorStack, if any.
@ -391,7 +441,7 @@ func (r *RegexLexer) maybeCompile() (err error) {
pattern = "(?" + rule.flags + ")" + pattern
}
pattern = `\G` + pattern
rule.Regexp, err = regexp2.Compile(pattern, 0)
rule.Regexp, err = regexp2.Compile(pattern, regexp2.RE2)
if err != nil {
return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
}
@ -486,18 +536,20 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
return state.Iterator, nil
}
func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule, []string) {
func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule, []string, map[string]string) {
for i, rule := range rules {
match, err := rule.Regexp.FindRunesMatchStartingAt(text, pos)
if match != nil && err == nil && match.Index == pos {
groups := []string{}
namedGroups := make(map[string]string)
for _, g := range match.Groups() {
namedGroups[g.Name] = g.String()
groups = append(groups, g.String())
}
return i, rule, groups
return i, rule, groups, namedGroups
}
}
return 0, &CompiledRule{}, nil
return 0, &CompiledRule{}, nil, nil
}
// replace \r and \r\n with \n

View File

@ -287,8 +287,10 @@ func (s *Style) synthesise(ttype TokenType) StyleEntry {
// If we don't have line numbers, use the text colour but 20% brighter/darker
case LineNumbers, LineNumbersTable:
return text
default:
return StyleEntry{}
}
return StyleEntry{}
}
func (s *Style) synthesisable(ttype TokenType) bool {

vendor/github.com/alecthomas/chroma/styles/doom-one.go generated vendored Normal file
View File

@ -0,0 +1,58 @@
package styles
import (
"github.com/alecthomas/chroma"
)
// Doom One style. Inspired by Atom One and Doom Emacs's Atom One theme
var DoomOne = Register(chroma.MustNewStyle("doom-one", chroma.StyleEntries{
chroma.Text: "#b0c4de",
chroma.Error: "#b0c4de",
chroma.Comment: "italic #8a93a5",
chroma.CommentHashbang: "bold",
chroma.Keyword: "#c678dd",
chroma.KeywordType: "#ef8383",
chroma.KeywordConstant: "bold #b756ff",
chroma.Operator: "#c7bf54",
chroma.OperatorWord: "bold #b756ff",
chroma.Punctuation: "#b0c4de",
chroma.Name: "#c1abea",
chroma.NameAttribute: "#b3d23c",
chroma.NameBuiltin: "#ef8383",
chroma.NameClass: "#76a9f9",
chroma.NameConstant: "bold #b756ff",
chroma.NameDecorator: "#e5c07b",
chroma.NameEntity: "#bda26f",
chroma.NameException: "bold #fd7474",
chroma.NameFunction: "#00b1f7",
chroma.NameProperty: "#cebc3a",
chroma.NameLabel: "#f5a40d",
chroma.NameNamespace: "#76a9f9",
chroma.NameTag: "#e06c75",
chroma.NameVariable: "#DCAEEA",
chroma.NameVariableGlobal: "bold #DCAEEA",
chroma.NameVariableInstance: "#e06c75",
chroma.Literal: "#98c379",
chroma.Number: "#d19a66",
chroma.String: "#98c379",
chroma.StringDoc: "#7e97c3",
chroma.StringDouble: "#63c381",
chroma.StringEscape: "bold #d26464",
chroma.StringHeredoc: "#98c379",
chroma.StringInterpol: "#98c379",
chroma.StringOther: "#70b33f",
chroma.StringRegex: "#56b6c2",
chroma.StringSingle: "#98c379",
chroma.StringSymbol: "#56b6c2",
chroma.Generic: "#b0c4de",
chroma.GenericEmph: "italic",
chroma.GenericHeading: "bold #a2cbff",
chroma.GenericInserted: "#a6e22e",
chroma.GenericOutput: "#a6e22e",
chroma.GenericUnderline: "underline",
chroma.GenericPrompt: "#a6e22e",
chroma.GenericStrong: "bold",
chroma.GenericSubheading: "#a2cbff",
chroma.GenericTraceback: "#a2cbff",
chroma.Background: "#b0c4de bg:#282c34",
}))
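A registered style becomes addressable by its name. A hedged usage sketch with the quick helper (illustrative, not part of this diff):

// Hypothetical usage of the newly vendored style by name.
package main

import (
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := `fmt.Println("hello")`
	// Lexer, formatter and style are all selected by registered name.
	if err := quick.Highlight(os.Stdout, src, "go", "terminal256", "doom-one"); err != nil {
		panic(err)
	}
}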

View File

@ -0,0 +1,71 @@
package styles
import (
"github.com/alecthomas/chroma"
)
// Doom One 2 style. Inspired by Atom One and Doom Emacs's Atom One theme
var DoomOne2 = Register(chroma.MustNewStyle("doom-one2", chroma.StyleEntries{
chroma.Text: "#b0c4de",
chroma.Error: "#b0c4de",
chroma.Comment: "italic #8a93a5",
chroma.CommentHashbang: "bold",
chroma.Keyword: "#76a9f9",
chroma.KeywordConstant: "#e5c07b",
chroma.KeywordType: "#e5c07b",
chroma.Operator: "#54b1c7",
chroma.OperatorWord: "bold #b756ff",
chroma.Punctuation: "#abb2bf",
chroma.Name: "#aa89ea",
chroma.NameAttribute: "#cebc3a",
chroma.NameBuiltin: "#e5c07b",
chroma.NameClass: "#ca72ff",
chroma.NameConstant: "bold",
chroma.NameDecorator: "#e5c07b",
chroma.NameEntity: "#bda26f",
chroma.NameException: "bold #fd7474",
chroma.NameFunction: "#00b1f7",
chroma.NameProperty: "#cebc3a",
chroma.NameLabel: "#f5a40d",
chroma.NameNamespace: "#ca72ff",
chroma.NameTag: "#76a9f9",
chroma.NameVariable: "#DCAEEA",
chroma.NameVariableClass: "#DCAEEA",
chroma.NameVariableGlobal: "bold #DCAEEA",
chroma.NameVariableInstance: "#e06c75",
chroma.NameVariableMagic: "#DCAEEA",
chroma.Literal: "#98c379",
chroma.LiteralDate: "#98c379",
chroma.Number: "#d19a66",
chroma.NumberBin: "#d19a66",
chroma.NumberFloat: "#d19a66",
chroma.NumberHex: "#d19a66",
chroma.NumberInteger: "#d19a66",
chroma.NumberIntegerLong: "#d19a66",
chroma.NumberOct: "#d19a66",
chroma.String: "#98c379",
chroma.StringAffix: "#98c379",
chroma.StringBacktick: "#98c379",
chroma.StringDelimiter: "#98c379",
chroma.StringDoc: "#7e97c3",
chroma.StringDouble: "#63c381",
chroma.StringEscape: "bold #d26464",
chroma.StringHeredoc: "#98c379",
chroma.StringInterpol: "#98c379",
chroma.StringOther: "#70b33f",
chroma.StringRegex: "#56b6c2",
chroma.StringSingle: "#98c379",
chroma.StringSymbol: "#56b6c2",
chroma.Generic: "#b0c4de",
chroma.GenericDeleted: "#b0c4de",
chroma.GenericEmph: "italic",
chroma.GenericHeading: "bold #a2cbff",
chroma.GenericInserted: "#a6e22e",
chroma.GenericOutput: "#a6e22e",
chroma.GenericUnderline: "underline",
chroma.GenericPrompt: "#a6e22e",
chroma.GenericStrong: "bold",
chroma.GenericSubheading: "#a2cbff",
chroma.GenericTraceback: "#a2cbff",
chroma.Background: "#b0c4de bg:#282c34",
}))

View File

@ -13,11 +13,11 @@ var Dracula = Register(chroma.MustNewStyle("dracula", chroma.StyleEntries{
chroma.CommentSingle: "#6272a4",
chroma.CommentSpecial: "#6272a4",
chroma.Generic: "#f8f8f2",
chroma.GenericDeleted: "#8b080b",
chroma.GenericDeleted: "#ff5555",
chroma.GenericEmph: "#f8f8f2 underline",
chroma.GenericError: "#f8f8f2",
chroma.GenericHeading: "#f8f8f2 bold",
chroma.GenericInserted: "#f8f8f2 bold",
chroma.GenericInserted: "#50fa7b bold",
chroma.GenericOutput: "#44475a",
chroma.GenericPrompt: "#f8f8f2",
chroma.GenericStrong: "#f8f8f2",

vendor/github.com/alecthomas/chroma/styles/hr_dark.go generated vendored Normal file
View File

@ -0,0 +1,17 @@
package styles
import (
"github.com/alecthomas/chroma"
)
// Theme based on HackerRank Dark Editor theme
var HrDark = Register(chroma.MustNewStyle("hrdark", chroma.StyleEntries{
chroma.Comment: "italic #828b96",
chroma.Keyword: "#ff636f",
chroma.OperatorWord: "#ff636f",
chroma.Name: "#58a1dd",
chroma.Literal: "#a6be9d",
chroma.Operator: "#ff636f",
chroma.Background: "#1d2432",
chroma.Other: "#fff",
}))

View File

@ -0,0 +1,19 @@
package styles
import (
"github.com/alecthomas/chroma"
)
// Theme based on HackerRank High Contrast Editor Theme
var HrHighContrast = Register(chroma.MustNewStyle("hr_high_contrast", chroma.StyleEntries{
chroma.Comment: "#5a8349",
chroma.Keyword: "#467faf",
chroma.OperatorWord: "#467faf",
chroma.Name: "#ffffff",
chroma.LiteralString: "#a87662",
chroma.LiteralNumber: "#fff",
chroma.LiteralStringBoolean: "#467faf",
chroma.Operator: "#e4e400",
chroma.Background: "#000",
chroma.Other: "#d5d500",
}))

View File

@ -0,0 +1,17 @@
package styles
import (
"github.com/alecthomas/chroma"
)
// 1S:Designer color palette
var OnesEnterprise = Register(chroma.MustNewStyle("onesenterprise", chroma.StyleEntries{
chroma.Text: "#000000",
chroma.Comment: "#008000",
chroma.CommentPreproc: "#963200",
chroma.Operator: "#FF0000",
chroma.Keyword: "#FF0000",
chroma.Punctuation: "#FF0000",
chroma.LiteralString: "#000000",
chroma.Name: "#0000FF",
}))

View File

@ -0,0 +1,52 @@
// Copyright 2018 Alethea Katherine Flowers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package styles
import (
"github.com/alecthomas/chroma"
)
// WitchHazel Style
var WitchHazel = Register(chroma.MustNewStyle("witchhazel", chroma.StyleEntries{
chroma.Text: "#F8F8F2",
chroma.Whitespace: "#A8757B",
chroma.Error: "#960050 bg:#1e0010",
chroma.Comment: "#b0bec5",
chroma.Keyword: "#C2FFDF",
chroma.KeywordNamespace: "#FFB8D1",
chroma.Operator: "#FFB8D1",
chroma.Punctuation: "#F8F8F2",
chroma.Name: "#F8F8F2",
chroma.NameAttribute: "#ceb1ff",
chroma.NameBuiltinPseudo: "#80cbc4",
chroma.NameClass: "#ceb1ff",
chroma.NameConstant: "#C5A3FF",
chroma.NameDecorator: "#ceb1ff",
chroma.NameException: "#ceb1ff",
chroma.NameFunction: "#ceb1ff",
chroma.NameProperty: "#F8F8F2",
chroma.NameTag: "#FFB8D1",
chroma.NameVariable: "#F8F8F2",
chroma.Number: "#C5A3FF",
chroma.Literal: "#ae81ff",
chroma.LiteralDate: "#e6db74",
chroma.String: "#1bc5e0",
chroma.GenericDeleted: "#f92672",
chroma.GenericEmph: "italic",
chroma.GenericInserted: "#a6e22e",
chroma.GenericStrong: "bold",
chroma.GenericSubheading: "#75715e",
chroma.Background: " bg:#433e56",
}))

View File

@ -21,7 +21,7 @@ var (
// Xcode dark style
var XcodeDark = Register(chroma.MustNewStyle("xcode-dark", chroma.StyleEntries{
chroma.Background: plainText + " bg: " + background,
chroma.Background: plainText + " bg:" + background,
chroma.Comment: comments,
chroma.CommentMultiline: comments,

View File

@ -9,14 +9,17 @@ func _() {
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[Background - -1]
_ = x[LineNumbers - -2]
_ = x[LineNumbersTable - -3]
_ = x[LineHighlight - -4]
_ = x[LineTable - -5]
_ = x[LineTableTD - -6]
_ = x[Error - -7]
_ = x[Other - -8]
_ = x[None - -9]
_ = x[PreWrapper - -2]
_ = x[Line - -3]
_ = x[LineNumbers - -4]
_ = x[LineNumbersTable - -5]
_ = x[LineHighlight - -6]
_ = x[LineTable - -7]
_ = x[LineTableTD - -8]
_ = x[CodeLine - -9]
_ = x[Error - -10]
_ = x[Other - -11]
_ = x[None - -12]
_ = x[EOFType-0]
_ = x[Keyword-1000]
_ = x[KeywordConstant-1001]
@ -105,104 +108,107 @@ func _() {
_ = x[TextPunctuation-8003]
}
const _TokenType_name = "NoneOtherErrorLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
const _TokenType_name = "NoneOtherErrorCodeLineLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersLinePreWrapperBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
var _TokenType_map = map[TokenType]string{
-9: _TokenType_name[0:4],
-8: _TokenType_name[4:9],
-7: _TokenType_name[9:14],
-6: _TokenType_name[14:25],
-5: _TokenType_name[25:34],
-4: _TokenType_name[34:47],
-3: _TokenType_name[47:63],
-2: _TokenType_name[63:74],
-1: _TokenType_name[74:84],
0: _TokenType_name[84:91],
1000: _TokenType_name[91:98],
1001: _TokenType_name[98:113],
1002: _TokenType_name[113:131],
1003: _TokenType_name[131:147],
1004: _TokenType_name[147:160],
1005: _TokenType_name[160:175],
1006: _TokenType_name[175:186],
2000: _TokenType_name[186:190],
2001: _TokenType_name[190:203],
2002: _TokenType_name[203:214],
2003: _TokenType_name[214:231],
2004: _TokenType_name[231:240],
2005: _TokenType_name[240:252],
2006: _TokenType_name[252:265],
2007: _TokenType_name[265:275],
2008: _TokenType_name[275:288],
2009: _TokenType_name[288:300],
2010: _TokenType_name[300:317],
2011: _TokenType_name[317:328],
2012: _TokenType_name[328:337],
2013: _TokenType_name[337:350],
2014: _TokenType_name[350:362],
2015: _TokenType_name[362:371],
2016: _TokenType_name[371:381],
2017: _TokenType_name[381:393],
2018: _TokenType_name[393:400],
2019: _TokenType_name[400:412],
2020: _TokenType_name[412:433],
2021: _TokenType_name[433:450],
2022: _TokenType_name[450:468],
2023: _TokenType_name[468:488],
2024: _TokenType_name[488:505],
3000: _TokenType_name[505:512],
3001: _TokenType_name[512:523],
3002: _TokenType_name[523:535],
3100: _TokenType_name[535:548],
3101: _TokenType_name[548:566],
3102: _TokenType_name[566:583],
3103: _TokenType_name[583:604],
3104: _TokenType_name[604:624],
3105: _TokenType_name[624:641],
3106: _TokenType_name[641:663],
3107: _TokenType_name[663:679],
3108: _TokenType_name[679:698],
3109: _TokenType_name[698:717],
3110: _TokenType_name[717:737],
3111: _TokenType_name[737:758],
3112: _TokenType_name[758:775],
3113: _TokenType_name[775:793],
3114: _TokenType_name[793:811],
3115: _TokenType_name[811:830],
3116: _TokenType_name[830:849],
3200: _TokenType_name[849:862],
3201: _TokenType_name[862:878],
3202: _TokenType_name[878:896],
3203: _TokenType_name[896:912],
3204: _TokenType_name[912:932],
3205: _TokenType_name[932:956],
3206: _TokenType_name[956:972],
4000: _TokenType_name[972:980],
4001: _TokenType_name[980:992],
5000: _TokenType_name[992:1003],
6000: _TokenType_name[1003:1010],
6001: _TokenType_name[1010:1025],
6002: _TokenType_name[1025:1041],
6003: _TokenType_name[1041:1054],
6004: _TokenType_name[1054:1068],
6100: _TokenType_name[1068:1082],
6101: _TokenType_name[1082:1100],
7000: _TokenType_name[1100:1107],
7001: _TokenType_name[1107:1121],
7002: _TokenType_name[1121:1132],
7003: _TokenType_name[1132:1144],
7004: _TokenType_name[1144:1158],
7005: _TokenType_name[1158:1173],
7006: _TokenType_name[1173:1186],
7007: _TokenType_name[1186:1199],
7008: _TokenType_name[1199:1212],
7009: _TokenType_name[1212:1229],
7010: _TokenType_name[1229:1245],
7011: _TokenType_name[1245:1261],
8000: _TokenType_name[1261:1265],
8001: _TokenType_name[1265:1279],
8002: _TokenType_name[1279:1289],
8003: _TokenType_name[1289:1304],
-12: _TokenType_name[0:4],
-11: _TokenType_name[4:9],
-10: _TokenType_name[9:14],
-9: _TokenType_name[14:22],
-8: _TokenType_name[22:33],
-7: _TokenType_name[33:42],
-6: _TokenType_name[42:55],
-5: _TokenType_name[55:71],
-4: _TokenType_name[71:82],
-3: _TokenType_name[82:86],
-2: _TokenType_name[86:96],
-1: _TokenType_name[96:106],
0: _TokenType_name[106:113],
1000: _TokenType_name[113:120],
1001: _TokenType_name[120:135],
1002: _TokenType_name[135:153],
1003: _TokenType_name[153:169],
1004: _TokenType_name[169:182],
1005: _TokenType_name[182:197],
1006: _TokenType_name[197:208],
2000: _TokenType_name[208:212],
2001: _TokenType_name[212:225],
2002: _TokenType_name[225:236],
2003: _TokenType_name[236:253],
2004: _TokenType_name[253:262],
2005: _TokenType_name[262:274],
2006: _TokenType_name[274:287],
2007: _TokenType_name[287:297],
2008: _TokenType_name[297:310],
2009: _TokenType_name[310:322],
2010: _TokenType_name[322:339],
2011: _TokenType_name[339:350],
2012: _TokenType_name[350:359],
2013: _TokenType_name[359:372],
2014: _TokenType_name[372:384],
2015: _TokenType_name[384:393],
2016: _TokenType_name[393:403],
2017: _TokenType_name[403:415],
2018: _TokenType_name[415:422],
2019: _TokenType_name[422:434],
2020: _TokenType_name[434:455],
2021: _TokenType_name[455:472],
2022: _TokenType_name[472:490],
2023: _TokenType_name[490:510],
2024: _TokenType_name[510:527],
3000: _TokenType_name[527:534],
3001: _TokenType_name[534:545],
3002: _TokenType_name[545:557],
3100: _TokenType_name[557:570],
3101: _TokenType_name[570:588],
3102: _TokenType_name[588:605],
3103: _TokenType_name[605:626],
3104: _TokenType_name[626:646],
3105: _TokenType_name[646:663],
3106: _TokenType_name[663:685],
3107: _TokenType_name[685:701],
3108: _TokenType_name[701:720],
3109: _TokenType_name[720:739],
3110: _TokenType_name[739:759],
3111: _TokenType_name[759:780],
3112: _TokenType_name[780:797],
3113: _TokenType_name[797:815],
3114: _TokenType_name[815:833],
3115: _TokenType_name[833:852],
3116: _TokenType_name[852:871],
3200: _TokenType_name[871:884],
3201: _TokenType_name[884:900],
3202: _TokenType_name[900:918],
3203: _TokenType_name[918:934],
3204: _TokenType_name[934:954],
3205: _TokenType_name[954:978],
3206: _TokenType_name[978:994],
4000: _TokenType_name[994:1002],
4001: _TokenType_name[1002:1014],
5000: _TokenType_name[1014:1025],
6000: _TokenType_name[1025:1032],
6001: _TokenType_name[1032:1047],
6002: _TokenType_name[1047:1063],
6003: _TokenType_name[1063:1076],
6004: _TokenType_name[1076:1090],
6100: _TokenType_name[1090:1104],
6101: _TokenType_name[1104:1122],
7000: _TokenType_name[1122:1129],
7001: _TokenType_name[1129:1143],
7002: _TokenType_name[1143:1154],
7003: _TokenType_name[1154:1166],
7004: _TokenType_name[1166:1180],
7005: _TokenType_name[1180:1195],
7006: _TokenType_name[1195:1208],
7007: _TokenType_name[1208:1221],
7008: _TokenType_name[1221:1234],
7009: _TokenType_name[1234:1251],
7010: _TokenType_name[1251:1267],
7011: _TokenType_name[1267:1283],
8000: _TokenType_name[1283:1287],
8001: _TokenType_name[1287:1301],
8002: _TokenType_name[1301:1311],
8003: _TokenType_name[1311:1326],
}
func (i TokenType) String() string {

View File

@ -38,6 +38,10 @@ func (t *TokenType) UnmarshalJSON(data []byte) error {
const (
// Default background style.
Background TokenType = -1 - iota
// PreWrapper style.
PreWrapper
// Line style.
Line
// Line numbers in output.
LineNumbers
// Line numbers in output when in table.
@ -48,6 +52,8 @@ const (
LineTable
// Line numbers table TD wrapper style.
LineTableTD
// Code line wrapper style.
CodeLine
// Input that could not be tokenised.
Error
// Other is used by the Delegate lexer to indicate which tokens should be handled by the delegate.
@ -219,12 +225,15 @@ const (
var (
StandardTypes = map[TokenType]string{
Background: "chroma",
Background: "bg",
PreWrapper: "chroma",
Line: "line",
LineNumbers: "ln",
LineNumbersTable: "lnt",
LineHighlight: "hl",
LineTable: "lntable",
LineTableTD: "lntd",
CodeLine: "cl",
Text: "",
Whitespace: "w",
Error: "err",
@ -342,6 +351,6 @@ func (t TokenType) InSubCategory(other TokenType) bool {
return t/100 == other/100
}
func (t TokenType) Emit(groups []string, lexer Lexer) Iterator {
func (t TokenType) Emit(groups []string, _ *LexerState) Iterator {
return Literator(Token{Type: t, Value: groups[0]})
}
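With the new PreWrapper, Line and CodeLine token types and the remapped class names above ("chroma" now on the pre wrapper, "bg" for Background, "cl" per code line), stylesheets are typically produced through the HTML formatter. A hedged sketch, assuming the formatters/html API:

// Hypothetical: emit the CSS classes listed in StandardTypes for a style.
package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	formatter := html.New(html.WithClasses(true), html.WithLineNumbers(true))
	// Writes selectors such as .chroma, .chroma .cl and .chroma .ln,
	// matching the StandardTypes mapping above.
	if err := formatter.WriteCSS(os.Stdout, styles.Get("doom-one")); err != nil {
		panic(err)
	}
}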