mirror of https://github.com/cheat/cheat.git
chore(deps): update dependencies
parent e847956b02
commit 0aca411279

go.mod | 5 ++---
@@ -3,16 +3,15 @@ module github.com/cheat/cheat
 go 1.14
 
 require (
-	github.com/alecthomas/chroma v0.8.2
+	github.com/alecthomas/chroma v0.9.1
 	github.com/davecgh/go-spew v1.1.1
-	github.com/dlclark/regexp2 v1.4.0 // indirect
 	github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815
 	github.com/kr/text v0.2.0 // indirect
 	github.com/mattn/go-isatty v0.0.12
 	github.com/mitchellh/go-homedir v1.1.0
 	github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
 	github.com/sergi/go-diff v1.1.0 // indirect
-	golang.org/x/sys v0.0.0-20201126233918-771906719818 // indirect
+	golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 // indirect
 	gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
 	gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0
 	gopkg.in/yaml.v2 v2.4.0
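
(A bump like this is typically produced by running `go get github.com/alecthomas/chroma@v0.9.1` followed by `go mod tidy`, which also drops the explicit `github.com/dlclark/regexp2` pin once it is only needed transitively and advances `golang.org/x/sys`; the vendored chroma sources further down would then be refreshed with `go mod vendor`. The exact commands are an assumption, not recorded in the commit.)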

go.sum | 9 ++++-----
@@ -1,7 +1,7 @@
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
-github.com/alecthomas/chroma v0.8.2 h1:x3zkuE2lUk/RIekyAJ3XRqSCP4zwWDfcw/YJCuCAACg=
-github.com/alecthomas/chroma v0.8.2/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
+github.com/alecthomas/chroma v0.9.1 h1:cBmvQqRImzR5aWqdMxYZByND4S7BCS/g0svZb28h0Dc=
+github.com/alecthomas/chroma v0.9.1/go.mod h1:eMuEnpA18XbG/WhOWtCzJHS7WqEtDAI+HxdwoW0nVSk=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
 github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
@@ -13,7 +13,6 @@ github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
 github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
 github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
 github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
@@ -45,8 +44,8 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201126233918-771906719818 h1:f1CIuDlJhwANEC2MM87MBEVMr3jl5bifgsfj90XAF9c=
-golang.org/x/sys v0.0.0-20201126233918-771906719818/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 h1:dXfMednGJh/SUUFjTLsWJz3P+TQt9qnR11GgeI3vWKs=
+golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=

@@ -1,5 +1,7 @@
 .PHONY: chromad upload all
 
+VERSION ?= $(shell git describe --tags --dirty --always)
+
 all: README.md tokentype_string.go
 
 README.md: lexers/*/*.go
@@ -9,10 +11,8 @@ tokentype_string.go: types.go
 	go generate
 
 chromad:
-	(cd ./cmd/chromad && go get github.com/GeertJohan/go.rice/rice@master && go install github.com/GeertJohan/go.rice/rice)
 	rm -f chromad
-	(export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -o ../../chromad .)
-	rice append -i ./cmd/chromad --exec=./chromad
+	(export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)
 
 upload: chromad
 	scp chromad root@swapoff.org: && \
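
The new `-ldflags="-X 'main.version=$(VERSION)'"` flag only has an effect if the `main` package of `cmd/chromad` declares a matching string variable. A minimal sketch of the receiving side, assuming a `version` variable with a fallback default (the variable name is fixed by the ldflags above; everything else is illustrative):

package main

import "fmt"

// version is replaced at link time by
// `go build -ldflags="-X 'main.version=<value>'"`; the default only
// shows up in builds that skip the Makefile.
var version = "unknown"

func main() {
	fmt.Println("chromad", version)
}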

@@ -1,4 +1,4 @@
-# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/chroma.svg)](https://circleci.com/gh/alecthomas/chroma) [![Go Report Card](https://goreportcard.com/badge/github.com/alecthomas/chroma)](https://goreportcard.com/report/github.com/alecthomas/chroma) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://gophers.slack.com/messages/CN9DS8YF3)
+# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/chroma.svg)](https://circleci.com/gh/alecthomas/chroma) [![Go Report Card](https://goreportcard.com/badge/github.com/alecthomas/chroma)](https://goreportcard.com/report/github.com/alecthomas/chroma) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/)
 
 > **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
 
@@ -38,7 +38,7 @@ Prefix | Language
 A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
 B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, BlitzBasic, BNF, Brainfuck
 C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
-D | D, Dart, Diff, Django/Jinja, Docker, DTD
+D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan
 E | EBNF, Elixir, Elm, EmacsLisp, Erlang
 F | Factor, Fish, Forth, Fortran, FSharp
 G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
@@ -216,7 +216,7 @@ python3 ~/Projects/chroma/_tools/pygments2chroma.py \
   && gofmt -s -w ~/Projects/chroma/lexers/*.go
 ```
 
-See notes in [pygments-lexers.go](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
+See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
 for a list of lexers, and notes on some of the issues importing them.
 
 <a id="markdown-formatters" name="formatters"></a>

@@ -17,6 +17,20 @@ var c = chroma.MustParseColour
 
 var ttyTables = map[int]*ttyTable{
 	8: {
+		foreground: map[chroma.Colour]string{
+			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
+			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
+			c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m",
+			c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m",
+		},
+		background: map[chroma.Colour]string{
+			c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
+			c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
+			c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m",
+			c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m",
+		},
+	},
+	16: {
 		foreground: map[chroma.Colour]string{
 			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
 			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
@@ -227,15 +241,11 @@ type indexedTTYFormatter struct {
 func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
 	theme := styleToEscapeSequence(c.table, style)
 	for token := it(); token != chroma.EOF; token = it() {
-		// TODO: Cache token lookups?
 		clr, ok := theme[token.Type]
 		if !ok {
 			clr, ok = theme[token.Type.SubCategory()]
 			if !ok {
 				clr = theme[token.Type.Category()]
-				// if !ok {
-				// 	clr = theme[chroma.InheritStyle]
-				// }
 			}
 		}
 		if clr != "" {
@@ -249,10 +259,22 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
 	return nil
 }
 
+// TTY is an 8-colour terminal formatter.
+//
+// The Lab colour space is used to map RGB values to the most appropriate index colour.
+var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
+
 // TTY8 is an 8-colour terminal formatter.
 //
 // The Lab colour space is used to map RGB values to the most appropriate index colour.
-var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
+var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]})
+
+// TTY16 is a 16-colour terminal formatter.
+//
+// It uses \033[3xm for normal colours and \033[90Xm for bright colours.
+//
+// The Lab colour space is used to map RGB values to the most appropriate index colour.
+var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]})
+
 // TTY256 is a 256-colour terminal formatter.
 //
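
After this hunk, `terminal` keeps the historical 8-colour behaviour while `terminal8` and the new `terminal16` become selectable by name. A hedged usage sketch via chroma's `quick` helper (the source snippet and style name are arbitrary):

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	// "terminal16" resolves to the TTY16 formatter registered above;
	// "terminal" still maps to the 8-colour table.
	if err := quick.Highlight(os.Stdout, `fmt.Println("hi")`, "go", "terminal16", "monokai"); err != nil {
		log.Fatal(err)
	}
}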

@@ -8,7 +8,7 @@ require (
 	github.com/alecthomas/kong v0.2.4
 	github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
 	github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
-	github.com/dlclark/regexp2 v1.2.0
+	github.com/dlclark/regexp2 v1.4.0
 	github.com/mattn/go-colorable v0.1.6
 	github.com/mattn/go-isatty v0.0.12
 	github.com/pkg/errors v0.9.1 // indirect

@@ -13,6 +13,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
 github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
+github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
+github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
 github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
 github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
 github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=

@@ -4,7 +4,7 @@ import "strings"
 
 // An Iterator across tokens.
 //
-// nil will be returned at the end of the Token stream.
+// EOF will be returned at the end of the Token stream.
 //
 // If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
 type Iterator func() Token
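
The corrected comment matches how iterators are consumed elsewhere in this commit (see the `Format` loop above): the stream is drained until `chroma.EOF`, not until `nil`. A minimal sketch, assuming the standard `Tokenise` entry point:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	it, err := lexers.Get("go").Tokenise(nil, "package main")
	if err != nil {
		panic(err)
	}
	// EOF, not nil, terminates the token stream.
	for token := it(); token != chroma.EOF; token = it() {
		fmt.Printf("%s %q\n", token.Type, token.Value)
	}
}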

@@ -6,7 +6,7 @@ import (
 )
 
 // ABAP lexer.
-var Abap = internal.Register(MustNewLexer(
+var Abap = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:            "ABAP",
 		Aliases:         []string{"abap"},
@@ -14,7 +14,11 @@ var Abap = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-abap"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	abapRules,
+))
+
+func abapRules() Rules {
+	return Rules{
 		"common": {
 			{`\s+`, Text, nil},
 			{`^\*.*$`, CommentSingle, nil},
@@ -52,5 +56,5 @@ var Abap = internal.Register(MustNewLexer(
 			{`[/;:()\[\],.]`, Punctuation, nil},
 			{`(!)(\w+)`, ByGroups(Operator, Name), nil},
 		},
-	},
-))
+	}
+}
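
This eager-to-lazy conversion repeats mechanically for every lexer below: the `Rules` literal moves out of the registration into a function, and `MustNewLazyLexer` defers building the (regex-heavy) rule map until the lexer is first requested. A sketch of the two shapes, using a hypothetical `Example` lexer rather than any file from this commit:

package a

import (
	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/internal"
)

// Before: rules are constructed eagerly at package init time.
var ExampleEager = internal.Register(MustNewLexer(
	&Config{Name: "ExampleEager", Aliases: []string{"example-eager"}},
	Rules{
		"root": {
			{`\s+`, Text, nil},
		},
	},
))

// After: MustNewLazyLexer takes a func() Rules, so the rule map (and its
// compiled regexes) is only built on first use.
var ExampleLazy = internal.Register(MustNewLazyLexer(
	&Config{Name: "ExampleLazy", Aliases: []string{"example-lazy"}},
	exampleLazyRules,
))

func exampleLazyRules() Rules {
	return Rules{
		"root": {
			{`\s+`, Text, nil},
		},
	}
}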

@@ -6,14 +6,18 @@ import (
 )
 
 // Abnf lexer.
-var Abnf = internal.Register(MustNewLexer(
+var Abnf = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "ABNF",
 		Aliases:   []string{"abnf"},
 		Filenames: []string{"*.abnf"},
 		MimeTypes: []string{"text/x-abnf"},
 	},
-	Rules{
+	abnfRules,
+))
+
+func abnfRules() Rules {
+	return Rules{
 		"root": {
 			{`;.*$`, CommentSingle, nil},
 			{`(%[si])?"[^"]*"`, Literal, nil},
@@ -34,5 +38,5 @@ var Abnf = internal.Register(MustNewLexer(
 			{`\s+`, Text, nil},
 			{`.`, Text, nil},
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Actionscript lexer.
-var Actionscript = internal.Register(MustNewLexer(
+var Actionscript = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:         "ActionScript",
 		Aliases:      []string{"as", "actionscript"},
@@ -15,7 +15,11 @@ var Actionscript = internal.Register(MustNewLexer(
 		NotMultiline: true,
 		DotAll:       true,
 	},
-	Rules{
+	actionscriptRules,
+))
+
+func actionscriptRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`//.*?\n`, CommentSingle, nil},
@@ -35,5 +39,5 @@ var Actionscript = internal.Register(MustNewLexer(
 			{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
 			{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Actionscript 3 lexer.
-var Actionscript3 = internal.Register(MustNewLexer(
+var Actionscript3 = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "ActionScript 3",
 		Aliases:   []string{"as3", "actionscript3"},
@@ -14,7 +14,11 @@ var Actionscript3 = internal.Register(MustNewLexer(
 		MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"},
 		DotAll:    true,
 	},
-	Rules{
+	actionscript3Rules,
+))
+
+func actionscript3Rules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")},
@@ -52,5 +56,5 @@ var Actionscript3 = internal.Register(MustNewLexer(
 			{`,`, Operator, Pop(1)},
 			Default(Pop(1)),
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Ada lexer.
-var Ada = internal.Register(MustNewLexer(
+var Ada = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:            "Ada",
 		Aliases:         []string{"ada", "ada95", "ada2005"},
@@ -14,7 +14,11 @@ var Ada = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-ada"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	adaRules,
+))
+
+func adaRules() Rules {
+	return Rules{
 		"root": {
 			{`[^\S\n]+`, Text, nil},
 			{`--.*?\n`, CommentSingle, nil},
@@ -110,5 +114,5 @@ var Ada = internal.Register(MustNewLexer(
 			{`\)`, Punctuation, Pop(1)},
 			Include("root"),
 		},
-	},
-))
+	}
+}

@@ -0,0 +1,48 @@
+package a
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// Al lexer.
+var Al = internal.Register(MustNewLazyLexer(
+	&Config{
+		Name:            "AL",
+		Aliases:         []string{"al"},
+		Filenames:       []string{"*.al", "*.dal"},
+		MimeTypes:       []string{"text/x-al"},
+		DotAll:          true,
+		CaseInsensitive: true,
+	},
+	alRules,
+))
+
+// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage
+func alRules() Rules {
+	return Rules{
+		"root": {
+			{`\s+`, TextWhitespace, nil},
+			{`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil},
+			{`(?s)//.*?\n`, CommentSingle, nil},
+			{`\"([^\"])*\"`, Text, nil},
+			{`'([^'])*'`, LiteralString, nil},
+			{`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil},
+			{`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil},
+			{`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil},
+			// Added new objects types of BC 2021 wave 1 (REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension)
+			{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension))\b`, Keyword, nil},
+			{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil},
+			{`\b([<>]=|<>|<|>)\b?`, Operator, nil},
+			{`\b(\-|\+|\/|\*)\b`, Operator, nil},
+			{`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil},
+			{`\b(?i:(ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
+			{`\s*[(\.\.)&\|]\s*`, Operator, nil},
+			{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil},
+			{`[;:,]`, Punctuation, nil},
+			{`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
+			{`\w+`, Text, nil},
+			{`.`, Text, nil},
+		},
+	}
+}

@@ -6,14 +6,18 @@ import (
 )
 
 // Angular2 lexer.
-var Angular2 = internal.Register(MustNewLexer(
+var Angular2 = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Angular2",
 		Aliases:   []string{"ng2"},
 		Filenames: []string{},
 		MimeTypes: []string{},
 	},
-	Rules{
+	angular2Rules,
+))
+
+func angular2Rules() Rules {
+	return Rules{
 		"root": {
 			{`[^{([*#]+`, Other, nil},
 			{`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")},
@@ -38,5 +42,5 @@ var Angular2 = internal.Register(MustNewLexer(
 			{`'.*?'`, LiteralString, Pop(1)},
 			{`[^\s>]+`, LiteralString, Pop(1)},
 		},
-	},
-))
+	}
+}

@@ -6,14 +6,18 @@ import (
 )
 
 // ANTLR lexer.
-var ANTLR = internal.Register(MustNewLexer(
+var ANTLR = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "ANTLR",
 		Aliases:   []string{"antlr"},
 		Filenames: []string{},
 		MimeTypes: []string{},
 	},
-	Rules{
+	antlrRules,
+))
+
+func antlrRules() Rules {
+	return Rules{
 		"whitespace": {
 			{`\s+`, TextWhitespace, nil},
 		},
@@ -97,5 +101,5 @@ var ANTLR = internal.Register(MustNewLexer(
 			{`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil},
 			{`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil},
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Apacheconf lexer.
-var Apacheconf = internal.Register(MustNewLexer(
+var Apacheconf = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "ApacheConf",
 		Aliases: []string{"apacheconf", "aconf", "apache"},
@@ -14,7 +14,11 @@ var Apacheconf = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-apacheconf"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	apacheconfRules,
+))
+
+func apacheconfRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`(#.*?)$`, Comment, nil},
@@ -34,5 +38,5 @@ var Apacheconf = internal.Register(MustNewLexer(
 			{`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil},
 			{`[^\s"\\]+`, Text, nil},
 		},
-	},
-))
+	}
+}

@@ -6,14 +6,18 @@ import (
 )
 
 // Apl lexer.
-var Apl = internal.Register(MustNewLexer(
+var Apl = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "APL",
 		Aliases:   []string{"apl"},
 		Filenames: []string{"*.apl"},
 		MimeTypes: []string{},
 	},
-	Rules{
+	aplRules,
+))
+
+func aplRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`[⍝#].*$`, CommentSingle, nil},
@@ -32,5 +36,5 @@ var Apl = internal.Register(MustNewLexer(
 			{`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil},
 			{`[{}]`, KeywordType, nil},
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Applescript lexer.
-var Applescript = internal.Register(MustNewLexer(
+var Applescript = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "AppleScript",
 		Aliases: []string{"applescript"},
@@ -14,7 +14,11 @@ var Applescript = internal.Register(MustNewLexer(
 		MimeTypes: []string{},
 		DotAll:    true,
 	},
-	Rules{
+	applescriptRules,
+))
+
+func applescriptRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`¬\n`, LiteralStringEscape, nil},
@@ -51,5 +55,5 @@ var Applescript = internal.Register(MustNewLexer(
 			{`[^*(]+`, CommentMultiline, nil},
 			{`[*(]`, CommentMultiline, nil},
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Arduino lexer.
-var Arduino = internal.Register(MustNewLexer(
+var Arduino = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Arduino",
 		Aliases: []string{"arduino"},
@@ -14,7 +14,11 @@ var Arduino = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-arduino"},
 		EnsureNL:  true,
 	},
-	Rules{
+	arduinoRules,
+))
+
+func arduinoRules() Rules {
+	return Rules{
 		"statements": {
 			{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil},
 			{`char(16_t|32_t)\b`, KeywordType, nil},
@@ -106,5 +110,5 @@ var Arduino = internal.Register(MustNewLexer(
 			{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
 			{`.*?\n`, Comment, nil},
 		},
-	},
-))
+	}
+}

@@ -6,14 +6,18 @@ import (
 )
 
 // Awk lexer.
-var Awk = internal.Register(MustNewLexer(
+var Awk = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Awk",
 		Aliases:   []string{"awk", "gawk", "mawk", "nawk"},
 		Filenames: []string{"*.awk"},
 		MimeTypes: []string{"application/x-awk"},
 	},
-	Rules{
+	awkRules,
+))
+
+func awkRules() Rules {
+	return Rules{
 		"commentsandwhitespace": {
 			{`\s+`, Text, nil},
 			{`#.*$`, CommentSingle, nil},
@@ -44,5 +48,5 @@ var Awk = internal.Register(MustNewLexer(
 			{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
 			{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Ballerina lexer.
-var Ballerina = internal.Register(MustNewLexer(
+var Ballerina = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Ballerina",
 		Aliases: []string{"ballerina"},
@@ -14,7 +14,11 @@ var Ballerina = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-ballerina"},
 		DotAll:    true,
 	},
-	Rules{
+	ballerinaRules,
+))
+
+func ballerinaRules() Rules {
+	return Rules{
 		"root": {
 			{`[^\S\n]+`, Text, nil},
 			{`//.*?\n`, CommentSingle, nil},
@@ -42,5 +46,5 @@ var Ballerina = internal.Register(MustNewLexer(
 		"import": {
 			{`[\w.]+`, NameNamespace, Pop(1)},
 		},
-	},
-))
+	}
+}

@@ -7,17 +7,27 @@ import (
 	"github.com/alecthomas/chroma/lexers/internal"
 )
 
+// TODO(moorereason): can this be factored away?
 var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)
 
 // Bash lexer.
-var Bash = internal.Register(MustNewLexer(
+var Bash = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Bash",
 		Aliases:   []string{"bash", "sh", "ksh", "zsh", "shell"},
 		Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
 		MimeTypes: []string{"application/x-sh", "application/x-shellscript"},
 	},
-	Rules{
+	bashRules,
+).SetAnalyser(func(text string) float32 {
+	if bashAnalyserRe.FindString(text) != "" {
+		return 1.0
+	}
+	return 0.0
+}))
+
+func bashRules() Rules {
+	return Rules{
 		"root": {
 			Include("basic"),
 			{"`", LiteralStringBacktick, Push("backticks")},
@@ -86,10 +96,5 @@
 			{"`", LiteralStringBacktick, Pop(1)},
 			Include("root"),
 		},
-	},
-).SetAnalyser(func(text string) float32 {
-	if bashAnalyserRe.FindString(text) != "" {
-		return 1.0
-	}
-	return 0.0
-}))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Batchfile lexer.
-var Batchfile = internal.Register(MustNewLexer(
+var Batchfile = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Batchfile",
 		Aliases: []string{"bat", "batch", "dosbatch", "winbatch"},
@@ -14,7 +14,11 @@ var Batchfile = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"application/x-dos-batch"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	batchfileRules,
+))
+
+func batchfileRules() Rules {
+	return Rules{
 		"root": {
 			{`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil},
 			{`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")},
@@ -190,5 +194,5 @@ var Batchfile = internal.Register(MustNewLexer(
 			{`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)},
 			Default(Pop(1)),
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Bibtex lexer.
-var Bibtex = internal.Register(MustNewLexer(
+var Bibtex = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "BibTeX",
 		Aliases: []string{"bib", "bibtex"},
@@ -15,7 +15,11 @@ var Bibtex = internal.Register(MustNewLexer(
 		NotMultiline:    true,
 		CaseInsensitive: true,
 	},
-	Rules{
+	bibtexRules,
+))
+
+func bibtexRules() Rules {
+	return Rules{
 		"root": {
 			Include("whitespace"),
 			{`@comment`, Comment, nil},
@@ -72,5 +76,5 @@ var Bibtex = internal.Register(MustNewLexer(
 		"whitespace": {
 			{`\s+`, Text, nil},
 		},
-	},
-))
+	}
+}

@@ -6,7 +6,7 @@ import (
 )
 
 // Blitzbasic lexer.
-var Blitzbasic = internal.Register(MustNewLexer(
+var Blitzbasic = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "BlitzBasic",
 		Aliases: []string{"blitzbasic", "b3d", "bplus"},
@@ -14,7 +14,11 @@ var Blitzbasic = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-bb"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	blitzbasicRules,
+))
+
+func blitzbasicRules() Rules {
+	return Rules{
 		"root": {
 			{`[ \t]+`, Text, nil},
 			{`;.*?\n`, CommentSingle, nil},
@@ -44,5 +48,5 @@ var Blitzbasic = internal.Register(MustNewLexer(
 			{`"C?`, LiteralStringDouble, Pop(1)},
 			{`[^"]+`, LiteralStringDouble, nil},
 		},
-	},
-))
+	}
+}

@@ -6,19 +6,23 @@ import (
 )
 
 // Bnf lexer.
-var Bnf = internal.Register(MustNewLexer(
+var Bnf = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "BNF",
 		Aliases:   []string{"bnf"},
 		Filenames: []string{"*.bnf"},
 		MimeTypes: []string{"text/x-bnf"},
 	},
-	Rules{
+	bnfRules,
+))
+
+func bnfRules() Rules {
+	return Rules{
 		"root": {
 			{`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil},
 			{`::=`, Operator, nil},
 			{`[^<>:]+`, Text, nil},
 			{`.`, Text, nil},
 		},
-	},
-))
+	}
+}

@@ -6,14 +6,18 @@ import (
 )
 
 // Brainfuck lexer.
-var Brainfuck = internal.Register(MustNewLexer(
+var Brainfuck = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Brainfuck",
 		Aliases:   []string{"brainfuck", "bf"},
 		Filenames: []string{"*.bf", "*.b"},
 		MimeTypes: []string{"application/x-brainfuck"},
 	},
-	Rules{
+	brainfuckRules,
+))
+
+func brainfuckRules() Rules {
+	return Rules{
 		"common": {
 			{`[.,]+`, NameTag, nil},
 			{`[+-]+`, NameBuiltin, nil},
@@ -30,5 +34,5 @@ var Brainfuck = internal.Register(MustNewLexer(
 			{`\]`, Keyword, Pop(1)},
 			Include("common"),
 		},
-	},
-))
+	}
+}

@@ -6,14 +6,19 @@ import (
 )
 
 // C lexer.
-var C = internal.Register(MustNewLexer(
+var C = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "C",
 		Aliases:   []string{"c"},
 		Filenames: []string{"*.c", "*.h", "*.idc"},
 		MimeTypes: []string{"text/x-chdr", "text/x-csrc"},
+		EnsureNL:  true,
 	},
-	Rules{
+	cRules,
+))
+
+func cRules() Rules {
+	return Rules{
 		"whitespace": {
 			{`^#if\s+0`, CommentPreproc, Push("if0")},
 			{`^#`, CommentPreproc, Push("macro")},
@@ -87,5 +92,5 @@ var C = internal.Register(MustNewLexer(
 			{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
 			{`.*?\n`, Comment, nil},
 		},
-	},
-))
+	}
+}
@ -6,143 +6,149 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
// caddyfileCommon are the rules common to both of the lexer variants
|
// caddyfileCommon are the rules common to both of the lexer variants
|
||||||
var caddyfileCommon = Rules{
|
func caddyfileCommonRules() Rules {
|
||||||
"site_block_common": {
|
return Rules{
|
||||||
// Import keyword
|
"site_block_common": {
|
||||||
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
|
// Import keyword
|
||||||
// Matcher definition
|
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
|
||||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
// Matcher definition
|
||||||
// Matcher token stub for docs
|
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
||||||
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
|
// Matcher token stub for docs
|
||||||
// These cannot have matchers but may have things that look like
|
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
|
||||||
// matchers in their arguments, so we just parse as a subdirective.
|
// These cannot have matchers but may have things that look like
|
||||||
{`try_files`, Keyword, Push("subdirective")},
|
// matchers in their arguments, so we just parse as a subdirective.
|
||||||
// These are special, they can nest more directives
|
{`try_files`, Keyword, Push("subdirective")},
|
||||||
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
|
// These are special, they can nest more directives
|
||||||
// Any other directive
|
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
|
||||||
{`[^\s#]+`, Keyword, Push("directive")},
|
// Any other directive
|
||||||
Include("base"),
|
{`[^\s#]+`, Keyword, Push("directive")},
|
||||||
},
|
Include("base"),
|
||||||
"matcher": {
|
},
|
||||||
{`\{`, Punctuation, Push("block")},
|
"matcher": {
|
||||||
// Not can be one-liner
|
{`\{`, Punctuation, Push("block")},
|
||||||
{`not`, Keyword, Push("deep_not_matcher")},
|
// Not can be one-liner
|
||||||
// Any other same-line matcher
|
{`not`, Keyword, Push("deep_not_matcher")},
|
||||||
{`[^\s#]+`, Keyword, Push("arguments")},
|
// Any other same-line matcher
|
||||||
// Terminators
|
{`[^\s#]+`, Keyword, Push("arguments")},
|
||||||
{`\n`, Text, Pop(1)},
|
// Terminators
|
||||||
{`\}`, Punctuation, Pop(1)},
|
{`\n`, Text, Pop(1)},
|
||||||
Include("base"),
|
{`\}`, Punctuation, Pop(1)},
|
||||||
},
|
Include("base"),
|
||||||
"block": {
|
},
|
||||||
{`\}`, Punctuation, Pop(2)},
|
"block": {
|
||||||
// Not can be one-liner
|
{`\}`, Punctuation, Pop(2)},
|
||||||
{`not`, Keyword, Push("not_matcher")},
|
// Not can be one-liner
|
||||||
// Any other subdirective
|
{`not`, Keyword, Push("not_matcher")},
|
||||||
{`[^\s#]+`, Keyword, Push("subdirective")},
|
// Any other subdirective
|
||||||
Include("base"),
|
{`[^\s#]+`, Keyword, Push("subdirective")},
|
||||||
},
|
Include("base"),
|
||||||
"nested_block": {
|
},
|
||||||
{`\}`, Punctuation, Pop(2)},
|
"nested_block": {
|
||||||
// Matcher definition
|
{`\}`, Punctuation, Pop(2)},
|
||||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
// Matcher definition
|
||||||
// Something that starts with literally < is probably a docs stub
|
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
||||||
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
|
// Something that starts with literally < is probably a docs stub
|
||||||
// Any other directive
|
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
|
||||||
{`[^\s#]+`, Keyword, Push("nested_directive")},
|
// Any other directive
|
||||||
Include("base"),
|
{`[^\s#]+`, Keyword, Push("nested_directive")},
|
||||||
},
|
Include("base"),
|
||||||
"not_matcher": {
|
},
|
||||||
{`\}`, Punctuation, Pop(2)},
|
"not_matcher": {
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
{`\}`, Punctuation, Pop(2)},
|
||||||
{`[^\s#]+`, Keyword, Push("arguments")},
|
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||||
{`\s+`, Text, nil},
|
{`[^\s#]+`, Keyword, Push("arguments")},
|
||||||
},
|
{`\s+`, Text, nil},
|
||||||
"deep_not_matcher": {
|
},
|
||||||
{`\}`, Punctuation, Pop(2)},
|
"deep_not_matcher": {
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
{`\}`, Punctuation, Pop(2)},
|
||||||
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
|
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||||
{`\s+`, Text, nil},
|
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
|
||||||
},
|
{`\s+`, Text, nil},
|
||||||
"directive": {
|
},
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
"directive": {
|
||||||
Include("matcher_token"),
|
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||||
Include("comments_pop_1"),
|
Include("matcher_token"),
|
||||||
{`\n`, Text, Pop(1)},
|
Include("comments_pop_1"),
|
||||||
Include("base"),
|
{`\n`, Text, Pop(1)},
|
||||||
},
|
Include("base"),
|
||||||
"nested_directive": {
|
},
|
||||||
{`\{(?=\s)`, Punctuation, Push("nested_block")},
|
"nested_directive": {
|
||||||
Include("matcher_token"),
|
{`\{(?=\s)`, Punctuation, Push("nested_block")},
|
||||||
Include("comments_pop_1"),
|
Include("matcher_token"),
|
||||||
{`\n`, Text, Pop(1)},
|
Include("comments_pop_1"),
|
||||||
Include("base"),
|
{`\n`, Text, Pop(1)},
|
||||||
},
|
Include("base"),
|
||||||
"subdirective": {
|
},
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
"subdirective": {
|
||||||
Include("comments_pop_1"),
|
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||||
{`\n`, Text, Pop(1)},
|
Include("comments_pop_1"),
|
||||||
Include("base"),
|
{`\n`, Text, Pop(1)},
|
||||||
},
|
Include("base"),
|
||||||
"arguments": {
|
},
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
"arguments": {
|
||||||
Include("comments_pop_2"),
|
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||||
{`\\\n`, Text, nil}, // Skip escaped newlines
|
Include("comments_pop_2"),
|
||||||
{`\n`, Text, Pop(2)},
|
{`\\\n`, Text, nil}, // Skip escaped newlines
|
||||||
Include("base"),
|
{`\n`, Text, Pop(2)},
|
||||||
},
|
Include("base"),
|
||||||
"deep_subdirective": {
|
},
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
"deep_subdirective": {
|
||||||
Include("comments_pop_3"),
|
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||||
{`\n`, Text, Pop(3)},
|
Include("comments_pop_3"),
|
||||||
Include("base"),
|
{`\n`, Text, Pop(3)},
|
||||||
},
|
Include("base"),
|
||||||
"matcher_token": {
|
},
|
||||||
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
|
"matcher_token": {
|
||||||
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
|
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
|
||||||
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
|
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
|
||||||
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
|
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
|
||||||
},
|
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
|
||||||
"comments": {
|
},
|
||||||
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
|
"comments": {
|
||||||
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
|
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
|
||||||
},
|
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
|
||||||
"comments_pop_1": {
|
},
|
||||||
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
|
"comments_pop_1": {
|
||||||
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
|
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
|
||||||
},
|
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
|
||||||
"comments_pop_2": {
|
},
|
||||||
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
|
"comments_pop_2": {
|
||||||
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
|
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
|
||||||
},
|
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
|
||||||
"comments_pop_3": {
|
},
|
||||||
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
|
"comments_pop_3": {
|
||||||
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
|
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
|
||||||
},
|
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
|
||||||
"base": {
|
},
|
||||||
Include("comments"),
|
"base": {
|
||||||
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
|
Include("comments"),
|
||||||
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
|
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
|
||||||
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
|
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
|
||||||
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
|
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
|
||||||
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
|
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
|
||||||
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
|
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
|
||||||
{`\]|\|`, Punctuation, nil},
|
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
|
||||||
{`[^\s#{}$\]]+`, LiteralString, nil},
|
{`\]|\|`, Punctuation, nil},
|
||||||
{`/[^\s#]*`, Name, nil},
|
{`[^\s#{}$\]]+`, LiteralString, nil},
|
||||||
{`\s+`, Text, nil},
|
{`/[^\s#]*`, Name, nil},
|
||||||
},
|
{`\s+`, Text, nil},
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
 // Caddyfile lexer.
-var Caddyfile = internal.Register(MustNewLexer(
+var Caddyfile = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Caddyfile",
 		Aliases:   []string{"caddyfile", "caddy"},
 		Filenames: []string{"Caddyfile*"},
 		MimeTypes: []string{},
 	},
-	Rules{
+	caddyfileRules,
+))
+
+func caddyfileRules() Rules {
+	return Rules{
 		"root": {
 			Include("comments"),
 			// Global options block
@@ -186,21 +192,25 @@ var Caddyfile = internal.Register(MustNewLexer(
 			{`\}`, Punctuation, Pop(2)},
 			Include("site_block_common"),
 		},
-	}.Merge(caddyfileCommon),
-))
+	}.Merge(caddyfileCommonRules())
+}
 
 // Caddyfile directive-only lexer.
-var CaddyfileDirectives = internal.Register(MustNewLexer(
+var CaddyfileDirectives = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Caddyfile Directives",
 		Aliases:   []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
 		Filenames: []string{},
 		MimeTypes: []string{},
 	},
-	Rules{
+	caddyfileDirectivesRules,
+))
+
+func caddyfileDirectivesRules() Rules {
+	return Rules{
 		// Same as "site_block" in Caddyfile
 		"root": {
 			Include("site_block_common"),
 		},
-	}.Merge(caddyfileCommon),
-))
+	}.Merge(caddyfileCommonRules())
+}
 
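Note on the pattern recurring through the rest of this commit: chroma v0.9 replaces eager rule construction (`MustNewLexer` with an inline `Rules` literal) with `MustNewLazyLexer`, which takes a `func() Rules` callback and defers building and compiling the rules until the lexer is first used. A minimal sketch of the shape, assuming the v0.9.x signature; the `Example` lexer and its rules are illustrative, not part of chroma:

package example

import (
	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/internal"
)

// Eager (v0.8.x): rules were built at package init:
//   var Example = internal.Register(MustNewLexer(&Config{...}, Rules{...}))
// Lazy (v0.9.x): rules are built on first use via the callback below.
var Example = internal.Register(MustNewLazyLexer(
	&Config{
		Name:    "Example",
		Aliases: []string{"example"},
	},
	exampleRules,
))

func exampleRules() Rules {
	return Rules{
		"root": {
			{`#.*\n`, CommentSingle, nil},
			{`\s+`, Text, nil},
			{`.`, Text, nil},
		},
	}
}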
@@ -6,14 +6,18 @@ import (
 )
 
 // Cap'N'Proto Proto lexer.
-var CapNProto = internal.Register(MustNewLexer(
+var CapNProto = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Cap'n Proto",
 		Aliases:   []string{"capnp"},
 		Filenames: []string{"*.capnp"},
 		MimeTypes: []string{},
 	},
-	Rules{
+	capNProtoRules,
+))
+
+func capNProtoRules() Rules {
+	return Rules{
 		"root": {
 			{`#.*?$`, CommentSingle, nil},
 			{`@[0-9a-zA-Z]*`, NameDecorator, nil},
@@ -57,5 +61,5 @@ var CapNProto = internal.Register(MustNewLexer(
 			{`[])]`, NameAttribute, Pop(1)},
 			Default(Pop(1)),
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Ceylon lexer.
-var Ceylon = internal.Register(MustNewLexer(
+var Ceylon = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Ceylon",
 		Aliases: []string{"ceylon"},
@@ -14,7 +14,11 @@ var Ceylon = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-ceylon"},
 		DotAll:    true,
 	},
-	Rules{
+	ceylonRules,
+))
+
+func ceylonRules() Rules {
+	return Rules{
 		"root": {
 			{`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
 			{`[^\S\n]+`, Text, nil},
@@ -59,5 +63,5 @@ var Ceylon = internal.Register(MustNewLexer(
 			{`\*/`, CommentMultiline, Pop(1)},
 			{`[*/]`, CommentMultiline, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,14 +6,18 @@ import (
 )
 
 // Cfengine3 lexer.
-var Cfengine3 = internal.Register(MustNewLexer(
+var Cfengine3 = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "CFEngine3",
 		Aliases:   []string{"cfengine3", "cf3"},
 		Filenames: []string{"*.cf"},
 		MimeTypes: []string{},
 	},
-	Rules{
+	cfengine3Rules,
+))
+
+func cfengine3Rules() Rules {
+	return Rules{
 		"root": {
 			{`#.*?\n`, Comment, nil},
 			{`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil},
@@ -52,5 +56,5 @@ var Cfengine3 = internal.Register(MustNewLexer(
 			{`\w+`, NameVariable, nil},
 			{`\s+`, Text, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Chaiscript lexer.
-var Chaiscript = internal.Register(MustNewLexer(
+var Chaiscript = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "ChaiScript",
 		Aliases: []string{"chai", "chaiscript"},
@@ -14,7 +14,11 @@ var Chaiscript = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"},
 		DotAll:    true,
 	},
-	Rules{
+	chaiscriptRules,
+))
+
+func chaiscriptRules() Rules {
+	return Rules{
 		"commentsandwhitespace": {
 			{`\s+`, Text, nil},
 			{`//.*?\n`, CommentSingle, nil},
@@ -59,5 +63,5 @@ var Chaiscript = internal.Register(MustNewLexer(
 			{`[^\\"$]+`, LiteralStringDouble, nil},
 			{`"`, LiteralStringDouble, Pop(1)},
 		},
-	},
-))
+	}
+}
 
@@ -7,14 +7,18 @@ import (
 )
 
 // Cheetah lexer.
-var Cheetah = internal.Register(MustNewLexer(
+var Cheetah = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Cheetah",
 		Aliases:   []string{"cheetah", "spitfire"},
 		Filenames: []string{"*.tmpl", "*.spt"},
 		MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"},
 	},
-	Rules{
+	cheetahRules,
+))
+
+func cheetahRules() Rules {
+	return Rules{
 		"root": {
 			{`(##[^\n]*)$`, ByGroups(Comment), nil},
 			{`#[*](.|\n)*?[*]#`, Comment, nil},
@@ -33,5 +37,5 @@ var Cheetah = internal.Register(MustNewLexer(
 			`, Other, nil},
 			{`\s+`, Text, nil},
 		},
-	},
-))
+	}
+}
 
@@ -230,7 +230,7 @@ var (
 )
 
 // Common Lisp lexer.
-var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
+var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
 	&Config{
 		Name:    "Common Lisp",
 		Aliases: []string{"common-lisp", "cl", "lisp"},
@@ -238,7 +238,19 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
 		MimeTypes:       []string{"text/x-common-lisp"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	commonLispRules,
+), TypeMapping{
+	{NameVariable, NameFunction, clBuiltinFunctions},
+	{NameVariable, Keyword, clSpecialForms},
+	{NameVariable, NameBuiltin, clMacros},
+	{NameVariable, Keyword, clLambdaListKeywords},
+	{NameVariable, Keyword, clDeclarations},
+	{NameVariable, KeywordType, clBuiltinTypes},
+	{NameVariable, NameClass, clBuiltinClasses},
+}))
+
+func commonLispRules() Rules {
+	return Rules{
 		"root": {
 			Default(Push("body")),
 		},
@@ -294,13 +306,5 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
 			{`\(`, Punctuation, Push("body")},
 			{`\)`, Punctuation, Pop(1)},
 		},
-	},
-), TypeMapping{
-	{NameVariable, NameFunction, clBuiltinFunctions},
-	{NameVariable, Keyword, clSpecialForms},
-	{NameVariable, NameBuiltin, clMacros},
-	{NameVariable, Keyword, clLambdaListKeywords},
-	{NameVariable, Keyword, clDeclarations},
-	{NameVariable, KeywordType, clBuiltinTypes},
-	{NameVariable, NameClass, clBuiltinClasses},
-}))
+	}
+}
 
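For the two TypeRemappingLexer-based lexers in this commit (Common Lisp above, EmacsLisp further down), the `TypeMapping` block moves from after the inline rules to directly after the rules callback; the remapping behaviour itself is unchanged. A rough usage sketch of that API, assuming chroma v0.9.x; the `my-special-form` word list is invented for the demo:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Wrap a registered lexer and retype selected NameVariable tokens,
	// the same mechanism the lexer above uses for clSpecialForms etc.
	base := lexers.Get("common-lisp")
	remapped := chroma.TypeRemappingLexer(base, chroma.TypeMapping{
		{chroma.NameVariable, chroma.Keyword, []string{"my-special-form"}},
	})
	it, err := remapped.Tokenise(nil, "(my-special-form 42)")
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-14s %q\n", tok.Type, tok.Value)
	}
}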
@@ -6,14 +6,18 @@ import (
 )
 
 // Clojure lexer.
-var Clojure = internal.Register(MustNewLexer(
+var Clojure = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Clojure",
 		Aliases:   []string{"clojure", "clj"},
 		Filenames: []string{"*.clj"},
 		MimeTypes: []string{"text/x-clojure", "application/x-clojure"},
 	},
-	Rules{
+	clojureRules,
+))
+
+func clojureRules() Rules {
+	return Rules{
 		"root": {
 			{`;.*$`, CommentSingle, nil},
 			{`[,\s]+`, Text, nil},
@@ -34,5 +38,5 @@ var Clojure = internal.Register(MustNewLexer(
 			{`(\{|\})`, Punctuation, nil},
 			{`(\(|\))`, Punctuation, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,14 +6,18 @@ import (
 )
 
 // Cmake lexer.
-var Cmake = internal.Register(MustNewLexer(
+var Cmake = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "CMake",
 		Aliases:   []string{"cmake"},
 		Filenames: []string{"*.cmake", "CMakeLists.txt"},
 		MimeTypes: []string{"text/x-cmake"},
 	},
-	Rules{
+	cmakeRules,
+))
+
+func cmakeRules() Rules {
+	return Rules{
 		"root": {
 			{`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")},
 			Include("keywords"),
@@ -40,5 +44,5 @@ var Cmake = internal.Register(MustNewLexer(
 			{`[ \t]+`, Text, nil},
 			{`#.*\n`, Comment, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Cobol lexer.
-var Cobol = internal.Register(MustNewLexer(
+var Cobol = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "COBOL",
 		Aliases: []string{"cobol"},
@@ -14,7 +14,11 @@ var Cobol = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-cobol"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	cobolRules,
+))
+
+func cobolRules() Rules {
+	return Rules{
 		"root": {
 			Include("comment"),
 			Include("strings"),
@@ -47,5 +51,5 @@ var Cobol = internal.Register(MustNewLexer(
 			{`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil},
 			{`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Coffeescript lexer.
-var Coffeescript = internal.Register(MustNewLexer(
+var Coffeescript = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "CoffeeScript",
 		Aliases: []string{"coffee-script", "coffeescript", "coffee"},
@@ -15,7 +15,11 @@ var Coffeescript = internal.Register(MustNewLexer(
 		NotMultiline: true,
 		DotAll:       true,
 	},
-	Rules{
+	coffeescriptRules,
+))
+
+func coffeescriptRules() Rules {
+	return Rules{
 		"commentsandwhitespace": {
 			{`\s+`, Text, nil},
 			{`###[^#].*?###`, CommentMultiline, nil},
@@ -87,5 +91,5 @@ var Coffeescript = internal.Register(MustNewLexer(
 			{`#|\\.|\'|"`, LiteralString, nil},
 			Include("strings"),
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Cfstatement lexer.
-var Cfstatement = internal.Register(MustNewLexer(
+var Cfstatement = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "cfstatement",
 		Aliases: []string{"cfs"},
@@ -15,7 +15,11 @@ var Cfstatement = internal.Register(MustNewLexer(
 		NotMultiline:    true,
 		CaseInsensitive: true,
 	},
-	Rules{
+	cfstatementRules,
+))
+
+func cfstatementRules() Rules {
+	return Rules{
 		"root": {
 			{`//.*?\n`, CommentSingle, nil},
 			{`/\*(?:.|\n)*?\*/`, CommentMultiline, nil},
@@ -44,5 +48,5 @@ var Cfstatement = internal.Register(MustNewLexer(
 			{`#`, LiteralStringDouble, nil},
 			{`"`, LiteralStringDouble, Pop(1)},
 		},
-	},
-))
+	}
+}
 
@@ -6,14 +6,18 @@ import (
 )
 
 // Coq lexer.
-var Coq = internal.Register(MustNewLexer(
+var Coq = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Coq",
 		Aliases:   []string{"coq"},
 		Filenames: []string{"*.v"},
 		MimeTypes: []string{"text/x-coq"},
 	},
-	Rules{
+	coqRules,
+))
+
+func coqRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil},
@@ -59,5 +63,5 @@ var Coq = internal.Register(MustNewLexer(
 			{`[a-z][a-z0-9_\']*`, Name, Pop(1)},
 			Default(Pop(1)),
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // CPP lexer.
-var CPP = internal.Register(MustNewLexer(
+var CPP = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "C++",
 		Aliases: []string{"cpp", "c++"},
@@ -14,7 +14,11 @@ var CPP = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"},
 		EnsureNL:  true,
 	},
-	Rules{
+	cppRules,
+))
+
+func cppRules() Rules {
+	return Rules{
 		"statements": {
 			{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil},
 			{`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")},
@@ -102,5 +106,5 @@ var CPP = internal.Register(MustNewLexer(
 			{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
 			{`.*?\n`, Comment, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // CassandraCQL lexer.
-var CassandraCQL = internal.Register(MustNewLexer(
+var CassandraCQL = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Cassandra CQL",
 		Aliases: []string{"cassandra", "cql"},
@@ -15,7 +15,11 @@ var CassandraCQL = internal.Register(MustNewLexer(
 		NotMultiline:    true,
 		CaseInsensitive: true,
 	},
-	Rules{
+	cassandraCQLRules,
+))
+
+func cassandraCQLRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, TextWhitespace, nil},
 			{`(--|\/\/).*\n?`, CommentSingle, nil},
@@ -23,7 +27,8 @@ var CassandraCQL = internal.Register(MustNewLexer(
 			{`(ascii|bigint|blob|boolean|counter|date|decimal|double|float|frozen|inet|int|list|map|set|smallint|text|time|timestamp|timeuuid|tinyint|tuple|uuid|varchar|varint)\b`, NameBuiltin, nil},
 			{Words(``, `\b`, `ADD`, `AGGREGATE`, `ALL`, `ALLOW`, `ALTER`, `AND`, `ANY`, `APPLY`, `AS`, `ASC`, `AUTHORIZE`, `BATCH`, `BEGIN`, `BY`, `CLUSTERING`, `COLUMNFAMILY`, `COMPACT`, `CONSISTENCY`, `COUNT`, `CREATE`, `CUSTOM`, `DELETE`, `DESC`, `DISTINCT`, `DROP`, `EACH_QUORUM`, `ENTRIES`, `EXISTS`, `FILTERING`, `FROM`, `FULL`, `GRANT`, `IF`, `IN`, `INDEX`, `INFINITY`, `INSERT`, `INTO`, `KEY`, `KEYS`, `KEYSPACE`, `KEYSPACES`, `LEVEL`, `LIMIT`, `LOCAL_ONE`, `LOCAL_QUORUM`, `MATERIALIZED`, `MODIFY`, `NAN`, `NORECURSIVE`, `NOSUPERUSER`, `NOT`, `OF`, `ON`, `ONE`, `ORDER`, `PARTITION`, `PASSWORD`, `PER`, `PERMISSION`, `PERMISSIONS`, `PRIMARY`, `QUORUM`, `RENAME`, `REVOKE`, `SCHEMA`, `SELECT`, `STATIC`, `STORAGE`, `SUPERUSER`, `TABLE`, `THREE`, `TO`, `TOKEN`, `TRUNCATE`, `TTL`, `TWO`, `TYPE`, `UNLOGGED`, `UPDATE`, `USE`, `USER`, `USERS`, `USING`, `VALUES`, `VIEW`, `WHERE`, `WITH`, `WRITETIME`, `REPLICATION`, `OR`, `REPLACE`, `FUNCTION`, `CALLED`, `INPUT`, `RETURNS`, `LANGUAGE`, `ROLE`, `ROLES`, `TRIGGER`, `DURABLE_WRITES`, `LOGIN`, `OPTIONS`, `LOGGED`, `SFUNC`, `STYPE`, `FINALFUNC`, `INITCOND`, `IS`, `CONTAINS`, `JSON`, `PAGING`, `OFF`), Keyword, nil},
 			{"[+*/<>=~!@#%^&|`?-]+", Operator, nil},
-			{`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
+			{
+				`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
 				UsingByGroup(
 					internal.Get,
 					1, 6,
@@ -65,5 +70,5 @@ var CassandraCQL = internal.Register(MustNewLexer(
 			{`[^\$]+`, LiteralStringHeredoc, nil},
 			{`\$\$`, LiteralStringHeredoc, Pop(1)},
 		},
-	},
-))
+	}
+}
 
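The reflowed CQL rule above is the interesting one: `UsingByGroup(internal.Get, 1, 6, ...)` looks up a sub-lexer by the language name captured in group 1 (`java`|`javascript`) and lexes group 6, the `$$`-delimited function body, with it. A hedged demo that dumps tokens for such a statement; the CQL sample itself is invented:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	src := "CREATE FUNCTION f(x int) RETURNS int LANGUAGE javascript AS $$ x * 2 $$;"
	it, err := lexers.Get("cql").Tokenise(nil, src)
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		// The body between $$ ... $$ should come out as JavaScript tokens.
		fmt.Printf("%-22s %q\n", tok.Type, tok.Value)
	}
}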
@@ -6,7 +6,7 @@ import (
 )
 
 // Crystal lexer.
-var Crystal = internal.Register(MustNewLexer(
+var Crystal = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Crystal",
 		Aliases: []string{"cr", "crystal"},
@@ -14,7 +14,11 @@ var Crystal = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-crystal"},
 		DotAll:    true,
 	},
-	Rules{
+	crystalRules,
+))
+
+func crystalRules() Rules {
+	return Rules{
 		"root": {
 			{`#.*?$`, CommentSingle, nil},
 			{Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil},
@@ -258,5 +262,5 @@ var Crystal = internal.Register(MustNewLexer(
 			{`[\\#<>]`, LiteralStringRegex, nil},
 			{`[^\\#<>]+`, LiteralStringRegex, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // CSharp lexer.
-var CSharp = internal.Register(MustNewLexer(
+var CSharp = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "C#",
 		Aliases: []string{"csharp", "c#"},
@@ -15,7 +15,11 @@ var CSharp = internal.Register(MustNewLexer(
 		DotAll:   true,
 		EnsureNL: true,
 	},
-	Rules{
+	cSharpRules,
+))
+
+func cSharpRules() Rules {
+	return Rules{
 		"root": {
 			{`^\s*\[.*?\]`, NameAttribute, nil},
 			{`[^\S\n]+`, Text, nil},
@@ -29,7 +33,7 @@ var CSharp = internal.Register(MustNewLexer(
 			{`\$@?"(""|[^"])*"`, LiteralString, nil},
 			{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
 			{`'\\.'|'[^\\]'`, LiteralStringChar, nil},
-			{`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil},
+			{`0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?`, LiteralNumber, nil},
 			{`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
 			{`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil},
 			{`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|interface|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil},
@@ -47,5 +51,5 @@ var CSharp = internal.Register(MustNewLexer(
 			{`(?=\()`, Text, Pop(1)},
 			{`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)},
 		},
-	},
-))
+	}
+}
 
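Besides the lazy-lexer rewrite, the C# number rule changes substance: the hex alternative moves first (Go-style regexps are leftmost-first, so `0xFFL` no longer stops at the leading `0`), the decimal branch admits `_` digit separators, and the exponent sign becomes optional. A quick comparison of the two patterns with the standard library's regexp (chroma compiles them with regexp2; the `^` anchors are added here only for the demo):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	oldRe := regexp.MustCompile(`^(?:[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?)`)
	newRe := regexp.MustCompile(`^(?:0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?)`)
	for _, s := range []string{"0xFFL", "2e10", "2e+10"} {
		// "0xFFL": old matches just "0"; new matches the whole literal.
		// "2e10":  old required a signed exponent, so it stopped at "2".
		fmt.Printf("%-6s old=%q new=%q\n", s, oldRe.FindString(s), newRe.FindString(s))
	}
}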
@@ -6,14 +6,18 @@ import (
 )
 
 // CSS lexer.
-var CSS = internal.Register(MustNewLexer(
+var CSS = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "CSS",
 		Aliases:   []string{"css"},
 		Filenames: []string{"*.css"},
 		MimeTypes: []string{"text/css"},
 	},
-	Rules{
+	cssRules,
+))
+
+func cssRules() Rules {
+	return Rules{
 		"root": {
 			Include("basics"),
 		},
@@ -39,6 +43,18 @@ var CSS = internal.Register(MustNewLexer(
 			Include("basics"),
 			{`\}`, Punctuation, Pop(2)},
 		},
+		"atparenthesis": {
+			Include("common-values"),
+			{`/\*(?:.|\n)*?\*/`, Comment, nil},
+			Include("numeric-values"),
+			{`[*+/-]`, Operator, nil},
+			{`[,]`, Punctuation, nil},
+			{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
+			{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
+			{`[a-zA-Z_-]\w*`, Name, nil},
+			{`\(`, Punctuation, Push("atparenthesis")},
+			{`\)`, Punctuation, Pop(1)},
+		},
 		"content": {
 			{`\s+`, Text, nil},
 			{`\}`, Punctuation, Pop(1)},
@@ -73,6 +89,7 @@ var CSS = internal.Register(MustNewLexer(
 			{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
 			{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
 			{`[a-zA-Z_-]\w*`, Name, nil},
+			{`\(`, Punctuation, Push("atparenthesis")},
 			{`\)`, Punctuation, Pop(1)},
 		},
 		"common-values": {
@@ -100,5 +117,5 @@ var CSS = internal.Register(MustNewLexer(
 			{`%`, KeywordType, nil},
 			Default(Pop(1)),
 		},
-	},
-))
+	}
+}
 
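The new `atparenthesis` state is self-recursive: `\(` pushes another copy and `\)` pops, so parenthesized value expressions (nested `calc()` arguments, for instance) can nest without derailing the states that now push into it. A quick, illustrative token dump; the stylesheet sample is invented and the exact token stream depends on the surrounding states:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	css := "@supports (width: calc(100% - (2 * 1em))) { a { color: red } }"
	it, err := lexers.Get("css").Tokenise(nil, css)
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-24s %q\n", tok.Type, tok.Value)
	}
}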
@@ -6,14 +6,18 @@ import (
 )
 
 // Cython lexer.
-var Cython = internal.Register(MustNewLexer(
+var Cython = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Cython",
 		Aliases:   []string{"cython", "pyx", "pyrex"},
 		Filenames: []string{"*.pyx", "*.pxd", "*.pxi"},
 		MimeTypes: []string{"text/x-cython", "application/x-cython"},
 	},
-	Rules{
+	cythonRules,
+))
+
+func cythonRules() Rules {
+	return Rules{
 		"root": {
 			{`\n`, Text, nil},
 			{`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil},
@@ -131,5 +135,5 @@ var Cython = internal.Register(MustNewLexer(
 			Include("strings"),
 			Include("nl"),
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // PHP lexer for pure PHP code (not embedded in HTML).
-var PHP = internal.Register(MustNewLexer(
+var PHP = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "PHP",
 		Aliases: []string{"php", "php3", "php4", "php5"},
@@ -16,65 +16,71 @@ var PHP = internal.Register(MustNewLexer(
 		CaseInsensitive: true,
 		EnsureNL:        true,
 	},
-	phpCommonRules.Rename("php", "root"),
+	phpRules,
 ))
 
-var phpCommonRules = Rules{
+func phpRules() Rules {
+	return phpCommonRules().Rename("php", "root")
+}
+
+func phpCommonRules() Rules {
+	return Rules{
 		"php": {
 			{`\?>`, CommentPreproc, Pop(1)},
 			{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
 			{`\s+`, Text, nil},
 			{`#.*?\n`, CommentSingle, nil},
 			{`//.*?\n`, CommentSingle, nil},
 			{`/\*\*/`, CommentMultiline, nil},
 			{`/\*\*.*?\*/`, LiteralStringDoc, nil},
 			{`/\*.*?\*/`, CommentMultiline, nil},
 			{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
 			{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
 			{`\?`, Operator, nil},
 			{`[\[\]{}();,]+`, Punctuation, nil},
 			{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
 			{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
 			{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
 			{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
 			{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
 			{`(true|false|null)\b`, KeywordConstant, nil},
 			Include("magicconstants"),
 			{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
 			{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
 			{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
 			{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
 			{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
 			{`0[0-7]+`, LiteralNumberOct, nil},
-			{`0x[a-f0-9]+`, LiteralNumberHex, nil},
-			{`\d+`, LiteralNumberInteger, nil},
+			{`0x[a-f0-9_]+`, LiteralNumberHex, nil},
+			{`[\d_]+`, LiteralNumberInteger, nil},
 			{`0b[01]+`, LiteralNumberBin, nil},
 			{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
 			{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
 			{`"`, LiteralStringDouble, Push("string")},
 		},
 		"magicfuncs": {
 			{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
 		},
 		"magicconstants": {
 			{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
 		},
 		"classname": {
 			{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
 		},
 		"functionname": {
 			Include("magicfuncs"),
 			{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
 			Default(Pop(1)),
 		},
 		"string": {
 			{`"`, LiteralStringDouble, Pop(1)},
 			{`[^{$"\\]+`, LiteralStringDouble, nil},
 			{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
 			{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
 			{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
 			{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
 			{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
 			{`[${\\]`, LiteralStringDouble, nil},
 		},
 	}
+}
 
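The PHP restructuring leans on two `Rules` helpers rather than new machinery: `Rename`, which re-keys a state (the shared `"php"` state becomes `"root"` for the standalone lexer), and `Merge`, which PHTML below uses to overlay its own `"root"` on the shared states. A tiny sketch of those semantics, assuming chroma v0.9.x; the toy patterns are illustrative:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	// phpRules(): expose a shared "php" state as "root".
	php := chroma.Rules{
		"php": {{Pattern: `\s+`, Type: chroma.Text}},
	}.Rename("php", "root")

	// phtmlRules(): overlay an HTML-facing "root" on the shared states.
	phtml := chroma.Rules{
		"root": {{Pattern: `<\?(php)?`, Type: chroma.CommentPreproc}},
	}.Merge(chroma.Rules{
		"php": {{Pattern: `\s+`, Type: chroma.Text}},
	})

	fmt.Println(len(php), len(phtml)) // 1 2
}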
@@ -9,7 +9,7 @@ import (
 )
 
 // PHTML lexer is PHP in HTML.
-var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
+var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
 	&Config{
 		Name:    "PHTML",
 		Aliases: []string{"phtml"},
@@ -19,16 +19,20 @@ var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
 		CaseInsensitive: true,
 		EnsureNL:        true,
 	},
-	Rules{
-		"root": {
-			{`<\?(php)?`, CommentPreproc, Push("php")},
-			{`[^<]+`, Other, nil},
-			{`<`, Other, nil},
-		},
-	}.Merge(phpCommonRules),
+	phtmlRules,
 ).SetAnalyser(func(text string) float32 {
 	if strings.Contains(text, "<?php") {
 		return 0.5
 	}
 	return 0.0
 })))
+
+func phtmlRules() Rules {
+	return Rules{
+		"root": {
+			{`<\?(php)?`, CommentPreproc, Push("php")},
+			{`[^<]+`, Other, nil},
+			{`<`, Other, nil},
+		},
+	}.Merge(phpCommonRules())
+}
 
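PHTML keeps its `SetAnalyser` hook through the rewrite: the analyser scores 0.5 whenever the text contains `<?php`, which is what content sniffing falls back on when no filename is available. A rough demonstration via `lexers.Analyse`; which lexer wins depends on every registered analyser, so treat the output as indicative:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	if lexer := lexers.Analyse("<html><?php echo 1; ?></html>"); lexer != nil {
		fmt.Println(lexer.Config().Name)
	}
}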
@@ -6,7 +6,7 @@ import (
 )
 
 // D lexer. https://dlang.org/spec/lex.html
-var D = internal.Register(MustNewLexer(
+var D = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "D",
 		Aliases: []string{"d"},
@@ -14,7 +14,11 @@ var D = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-d"},
 		EnsureNL:  true,
 	},
-	Rules{
+	dRules,
+))
+
+func dRules() Rules {
+	return Rules{
 		"root": {
 			{`[^\S\n]+`, Text, nil},
@@ -65,5 +69,5 @@ var D = internal.Register(MustNewLexer(
 		"import": {
 			{`[\w.]+\*?`, NameNamespace, Pop(1)},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Dart lexer.
-var Dart = internal.Register(MustNewLexer(
+var Dart = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Dart",
 		Aliases: []string{"dart"},
@@ -14,7 +14,11 @@ var Dart = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/x-dart"},
 		DotAll:    true,
 	},
-	Rules{
+	dartRules,
+))
+
+func dartRules() Rules {
+	return Rules{
 		"root": {
 			Include("string_literal"),
 			{`#!(.*?)$`, CommentPreproc, nil},
@@ -87,5 +91,5 @@ var Dart = internal.Register(MustNewLexer(
 			Include("string_common"),
 			{`(\$|\')+`, LiteralStringSingle, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Diff lexer.
-var Diff = internal.Register(MustNewLexer(
+var Diff = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Diff",
 		Aliases: []string{"diff", "udiff"},
@@ -14,7 +14,11 @@ var Diff = internal.Register(MustNewLexer(
 		Filenames: []string{"*.diff", "*.patch"},
 		MimeTypes: []string{"text/x-diff", "text/x-patch"},
 	},
-	Rules{
+	diffRules,
+))
+
+func diffRules() Rules {
+	return Rules{
 		"root": {
 			{` .*\n`, Text, nil},
 			{`\+.*\n`, GenericInserted, nil},
@@ -25,5 +29,5 @@ var Diff = internal.Register(MustNewLexer(
 			{`=.*\n`, GenericHeading, nil},
 			{`.*\n`, Text, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,7 +6,7 @@ import (
 )
 
 // Django/Jinja lexer.
-var DjangoJinja = internal.Register(MustNewLexer(
+var DjangoJinja = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Django/Jinja",
 		Aliases: []string{"django", "jinja"},
@@ -14,7 +14,11 @@ var DjangoJinja = internal.Register(MustNewLexer(
 		MimeTypes: []string{"application/x-django-templating", "application/x-jinja"},
 		DotAll:    true,
 	},
-	Rules{
+	djangoJinjaRules,
+))
+
+func djangoJinjaRules() Rules {
+	return Rules{
 		"root": {
 			{`[^{]+`, Other, nil},
 			{`\{\{`, CommentPreproc, Push("var")},
@@ -49,5 +53,5 @@ var DjangoJinja = internal.Register(MustNewLexer(
 			Include("varnames"),
 			{`.`, Punctuation, nil},
 		},
-	},
-))
+	}
+}
 
@@ -8,7 +8,7 @@ import (
 )
 
 // Docker lexer.
-var Docker = internal.Register(MustNewLexer(
+var Docker = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "Docker",
 		Aliases: []string{"docker", "dockerfile"},
@@ -16,7 +16,11 @@ var Docker = internal.Register(MustNewLexer(
 		MimeTypes:       []string{"text/x-dockerfile-config"},
 		CaseInsensitive: true,
 	},
-	Rules{
+	dockerRules,
+))
+
+func dockerRules() Rules {
+	return Rules{
 		"root": {
 			{`#.*`, Comment, nil},
 			{`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using(b.Bash)), nil},
@@ -27,5 +31,5 @@ var Docker = internal.Register(MustNewLexer(
 			{`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil},
 			{`(.*\\\n)*.+`, Using(b.Bash), nil},
 		},
-	},
-))
+	}
+}
 
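One detail in the Docker rules worth flagging: instruction bodies are delegated to the Bash lexer through `Using(b.Bash)`, so shell syntax inside `RUN` lines is highlighted as shell. An illustrative round-trip; the Dockerfile snippet is invented:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	it, err := lexers.Get("docker").Tokenise(nil, "FROM alpine\nRUN echo $HOME\n")
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-20s %q\n", tok.Type, tok.Value)
	}
}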
@@ -6,7 +6,7 @@ import (
 )
 
 // Dtd lexer.
-var Dtd = internal.Register(MustNewLexer(
+var Dtd = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:    "DTD",
 		Aliases: []string{"dtd"},
@@ -14,7 +14,11 @@ var Dtd = internal.Register(MustNewLexer(
 		MimeTypes: []string{"application/xml-dtd"},
 		DotAll:    true,
 	},
-	Rules{
+	dtdRules,
+))
+
+func dtdRules() Rules {
+	return Rules{
 		"root": {
 			Include("common"),
 			{`(<!ELEMENT)(\s+)(\S+)`, ByGroups(Keyword, Text, NameTag), Push("element")},
@@ -65,5 +69,5 @@ var Dtd = internal.Register(MustNewLexer(
 			{`[^>\s|()?+*,]+`, NameAttribute, nil},
 			{`>`, Keyword, Pop(1)},
 		},
-	},
-))
+	}
+}
 
@@ -0,0 +1,74 @@
+package d
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// Dylan lexer.
+var Dylan = internal.Register(MustNewLexer(
+	&Config{
+		Name:            "Dylan",
+		Aliases:         []string{"dylan"},
+		Filenames:       []string{"*.dylan", "*.dyl", "*.intr"},
+		MimeTypes:       []string{"text/x-dylan"},
+		CaseInsensitive: true,
+	},
+	Rules{
+		"root": {
+			{`\s+`, Whitespace, nil},
+			{`//.*?\n`, CommentSingle, nil},
+			{`([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Whitespace, LiteralString), nil},
+			Default(Push("code")),
+		},
+		"code": {
+			{`\s+`, Whitespace, nil},
+			{`//.*?\n`, CommentSingle, nil},
+			{`/\*`, CommentMultiline, Push("comment")},
+			{`"`, LiteralString, Push("string")},
+			{`'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil},
+			{`#b[01]+`, LiteralNumberBin, nil},
+			{`#o[0-7]+`, LiteralNumberOct, nil},
+			{`[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)`, LiteralNumberFloat, nil},
+			{`[-+]?\d+`, LiteralNumberInteger, nil},
+			{`#x[0-9a-f]+`, LiteralNumberHex, nil},
+
+			{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)`,
+				ByGroups(Operator, NameVariable, Operator, NameBuiltin), nil},
+			{`(\?)(:)(token|name|variable|expression|body|case-body|\*)`,
+				ByGroups(Operator, Operator, NameVariable), nil},
+			{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, NameVariable), nil},
+
+			{`(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])`, Punctuation, nil},
+			{`:=`, Operator, nil},
+			{`#[tf]`, Literal, nil},
+			{`#"`, LiteralStringSymbol, Push("symbol")},
+			{`#[a-z0-9-]+`, Keyword, nil},
+			{`#(all-keys|include|key|next|rest)`, Keyword, nil},
+			{`[\w!&*<>|^$%@+~?/=-]+:`, KeywordConstant, nil},
+			{`<[\w!&*<>|^$%@+~?/=-]+>`, NameClass, nil},
+			{`\*[\w!&*<>|^$%@+~?/=-]+\*`, NameVariableGlobal, nil},
+			{`\$[\w!&*<>|^$%@+~?/=-]+`, NameConstant, nil},
+			{`(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(NameBuiltin, Whitespace, NameVariable), nil},
+			{`(error|signal|return|break)`, NameException, nil},
+			{`(\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, Name), nil},
+		},
+		"comment": {
+			{`[^*/]`, CommentMultiline, nil},
+			{`/\*`, CommentMultiline, Push()},
+			{`\*/`, CommentMultiline, Pop(1)},
+			{`[*/]`, CommentMultiline, nil},
+		},
+		"symbol": {
+			{`"`, LiteralStringSymbol, Pop(1)},
+			{`[^\\"]+`, LiteralStringSymbol, nil},
+		},
+		"string": {
+			{`"`, LiteralString, Pop(1)},
+			{`\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil},
+			{`[^\\"\n]+`, LiteralString, nil},
+			{`\\\n`, LiteralString, nil},
+			{`\\`, LiteralString, nil},
+		},
+	},
+))
 
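The Dylan lexer is the one genuinely new file in this batch, and, unlike the updated lexers around it, it still registers eagerly via `MustNewLexer` with an inline `Rules` literal. A minimal usage sketch against the `dylan` alias registered above; the source sample is invented:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	it, err := lexers.Get("dylan").Tokenise(nil, "define method double (x :: <integer>) 2 * x end;\n")
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-20s %q\n", tok.Type, tok.Value)
	}
}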
@@ -6,14 +6,18 @@ import (
 )
 
 // Ebnf lexer.
-var Ebnf = internal.Register(MustNewLexer(
+var Ebnf = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "EBNF",
 		Aliases:   []string{"ebnf"},
 		Filenames: []string{"*.ebnf"},
 		MimeTypes: []string{"text/x-ebnf"},
 	},
-	Rules{
+	ebnfRules,
+))
+
+func ebnfRules() Rules {
+	return Rules{
 		"root": {
 			Include("whitespace"),
 			Include("comment_start"),
@@ -47,5 +51,5 @@ var Ebnf = internal.Register(MustNewLexer(
 		"identifier": {
 			{`([a-zA-Z][\w \-]*)`, Keyword, nil},
 		},
-	},
-))
+	}
+}
 
@@ -6,14 +6,18 @@ import (
 )
 
 // Elixir lexer.
-var Elixir = internal.Register(MustNewLexer(
+var Elixir = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Elixir",
 		Aliases:   []string{"elixir", "ex", "exs"},
 		Filenames: []string{"*.ex", "*.exs"},
 		MimeTypes: []string{"text/x-elixir"},
 	},
-	Rules{
+	elixirRules,
+))
+
+func elixirRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`#.*$`, CommentSingle, nil},
@@ -273,5 +277,5 @@ var Elixir = internal.Register(MustNewLexer(
 			{`\\.`, LiteralStringOther, nil},
 			{`'[a-zA-Z]*`, LiteralStringOther, Pop(1)},
 		},
-	},
-))
+	}
+}
 
@@ -6,14 +6,18 @@ import (
 )
 
 // Elm lexer.
-var Elm = internal.Register(MustNewLexer(
+var Elm = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name:      "Elm",
 		Aliases:   []string{"elm"},
 		Filenames: []string{"*.elm"},
 		MimeTypes: []string{"text/x-elm"},
 	},
-	Rules{
+	elmRules,
+))
+
+func elmRules() Rules {
+	return Rules{
 		"root": {
 			{`\{-`, CommentMultiline, Push("comment")},
 			{`--.*`, CommentSingle, nil},
@@ -55,5 +59,5 @@ var Elm = internal.Register(MustNewLexer(
 			{`\|\]`, NameEntity, Pop(1)},
 			{`.*\n`, NameEntity, nil},
 		},
-	},
-))
+	}
+}
 
@@ -522,14 +522,24 @@ var (
 )
 
 // EmacsLisp lexer.
-var EmacsLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
+var EmacsLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
     &Config{
         Name:      "EmacsLisp",
         Aliases:   []string{"emacs", "elisp", "emacs-lisp"},
         Filenames: []string{"*.el"},
         MimeTypes: []string{"text/x-elisp", "application/x-elisp"},
     },
-    Rules{
+    emacsLispRules,
+), TypeMapping{
+    {NameVariable, NameFunction, emacsBuiltinFunction},
+    {NameVariable, NameBuiltin, emacsSpecialForms},
+    {NameVariable, NameException, emacsErrorKeywords},
+    {NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)},
+    {NameVariable, KeywordPseudo, emacsLambdaListKeywords},
+}))
+
+func emacsLispRules() Rules {
+    return Rules{
         "root": {
             Default(Push("body")),
         },
@@ -572,11 +582,5 @@ var EmacsLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
             {`\\\n`, LiteralString, nil},
             {`"`, LiteralString, Pop(1)},
         },
-    },
-), TypeMapping{
-    {NameVariable, NameFunction, emacsBuiltinFunction},
-    {NameVariable, NameBuiltin, emacsSpecialForms},
-    {NameVariable, NameException, emacsErrorKeywords},
-    {NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)},
-    {NameVariable, KeywordPseudo, emacsLambdaListKeywords},
-}))
+    }
+}
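EmacsLisp is the one lexer in this batch that is additionally wrapped in `TypeRemappingLexer`, which post-processes the token stream: any token of the first (`From`) type whose text appears in the word list is re-emitted with the second (`To`) type. A reduced sketch of that wrapper, using the same `TypeRemappingLexer`/`TypeMapping` API as above (the three-entry word list is a placeholder; the real lexer passes its full built-in tables):

package main

import "github.com/alecthomas/chroma"

// wrap retypes selected NameVariable tokens after lexing, so that e.g.
// `car` highlights as a function rather than a plain variable.
func wrap(base chroma.Lexer) chroma.Lexer {
    return chroma.TypeRemappingLexer(base, chroma.TypeMapping{
        {chroma.NameVariable, chroma.NameFunction, []string{"car", "cdr", "cons"}},
    })
}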
@@ -6,14 +6,18 @@ import (
 )
 
 // Erlang lexer.
-var Erlang = internal.Register(MustNewLexer(
+var Erlang = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Erlang",
         Aliases:   []string{"erlang"},
         Filenames: []string{"*.erl", "*.hrl", "*.es", "*.escript"},
        
 MimeTypes: []string{"text/x-erlang"},
     },
-    Rules{
+    erlangRules,
+))
+
+func erlangRules() Rules {
+    return Rules{
         "root": {
             {`\s+`, Text, nil},
             {`%.*\n`, Comment, nil},
@@ -62,5 +66,5 @@ var Erlang = internal.Register(MustNewLexer(
             {`,`, Punctuation, Pop(1)},
             {`(?=\})`, Punctuation, Pop(1)},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Factor lexer.
-var Factor = internal.Register(MustNewLexer(
+var Factor = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Factor",
         Aliases:   []string{"factor"},
         Filenames: []string{"*.factor"},
         MimeTypes: []string{"text/x-factor"},
     },
-    Rules{
+    factorRules,
+))
+
+func factorRules() Rules {
+    return Rules{
         "root": {
             {`#!.*$`, CommentPreproc, nil},
             Default(Push("base")),
@@ -111,5 +115,5 @@ var Factor = internal.Register(MustNewLexer(
             {`;\s`, Keyword, Pop(1)},
             {`\S+`, NameFunction, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Fish lexer.
-var Fish = internal.Register(MustNewLexer(
+var Fish = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Fish",
         Aliases:   []string{"fish", "fishshell"},
         Filenames: []string{"*.fish", "*.load"},
         MimeTypes: []string{"application/x-fish"},
     },
-    Rules{
+    fishRules,
+))
+
+func fishRules() Rules {
+    return Rules{
         "root": {
             Include("basic"),
             Include("data"),
@@ -61,5 +65,5 @@ var Fish = internal.Register(MustNewLexer(
             {`\d+`, LiteralNumber, nil},
             Include("root"),
         },
-    },
-))
+    }
+}
@@ -6,7 +6,7 @@ import (
 )
 
 // Forth lexer.
-var Forth = internal.Register(MustNewLexer(
+var Forth = internal.Register(MustNewLazyLexer(
     &Config{
         Name:    "Forth",
         Aliases: []string{"forth"},
@@ -14,7 +14,11 @@ var Forth = internal.Register(MustNewLexer(
         MimeTypes:       []string{"application/x-forth"},
         CaseInsensitive: true,
     },
-    Rules{
+    forthRules,
+))
+
+func forthRules() Rules {
+    return Rules{
         "root": {
             {`\s+`, Text, nil},
             {`\\.*?\n`, CommentSingle, nil},
@@ -36,5 +40,5 @@ var Forth = internal.Register(MustNewLexer(
         "stringdef": {
             {`[^"]+`, LiteralString, Pop(1)},
         },
-    },
-))
+    }
+}
@@ -6,7 +6,7 @@ import (
 )
 
 // Fortran lexer.
-var Fortran = internal.Register(MustNewLexer(
+var Fortran = internal.Register(MustNewLazyLexer(
     &Config{
         Name:    "Fortran",
         Aliases: []string{"fortran"},
@@ -14,7 +14,11 @@ var Fortran = internal.Register(MustNewLexer(
         MimeTypes:       []string{"text/x-fortran"},
         CaseInsensitive: true,
     },
-    Rules{
+    fortranRules,
+))
+
+func fortranRules() Rules {
+    return Rules{
         "root": {
             {`^#.*\n`, CommentPreproc, nil},
             {`!.*\n`, Comment, nil},
@@ -43,5 +47,5 @@ var Fortran = internal.Register(MustNewLexer(
             {`[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil},
             {`[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Fsharp lexer.
-var Fsharp = internal.Register(MustNewLexer(
+var Fsharp = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "FSharp",
         Aliases:   []string{"fsharp"},
         Filenames: []string{"*.fs", "*.fsi"},
         MimeTypes: []string{"text/x-fsharp"},
     },
-    Rules{
+    fsharpRules,
+))
+
+func fsharpRules() Rules {
+    return Rules{
         "escape-sequence": {
             {`\\[\\"\'ntbrafv]`, LiteralStringEscape, nil},
             {`\\[0-9]{3}`, LiteralStringEscape, nil},
@@ -90,5 +94,5 @@ var Fsharp = internal.Register(MustNewLexer(
             {`"""B?`, LiteralString, Pop(1)},
             {`"`, LiteralString, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Gas lexer.
-var Gas = internal.Register(MustNewLexer(
+var Gas = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "GAS",
         Aliases:   []string{"gas", "asm"},
         Filenames: []string{"*.s", "*.S"},
         MimeTypes: []string{"text/x-gas"},
     },
-    Rules{
+    gasRules,
+))
+
+func gasRules() Rules {
+    return Rules{
         "root": {
             Include("whitespace"),
             {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+):`, NameLabel, nil},
@@ -51,5 +55,5 @@ var Gas = internal.Register(MustNewLexer(
         "punctuation": {
             {`[-*,.()\[\]!:]+`, Punctuation, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // GDScript lexer.
-var GDScript = internal.Register(MustNewLexer(
+var GDScript = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "GDScript",
         Aliases:   []string{"gdscript", "gd"},
         Filenames: []string{"*.gd"},
         MimeTypes: []string{"text/x-gdscript", "application/x-gdscript"},
     },
-    Rules{
+    gdscriptRules,
+))
+
+func gdscriptRules() Rules {
+    return Rules{
         "root": {
             {`\n`, Text, nil},
             {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil},
@@ -120,5 +124,5 @@ var GDScript = internal.Register(MustNewLexer(
             Include("strings-single"),
             {`\n`, LiteralStringSingle, nil},
         },
-    },
-))
+    }
+}
@@ -7,14 +7,18 @@ import (
 )
 
 // Genshi Text lexer.
-var GenshiText = internal.Register(MustNewLexer(
+var GenshiText = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Genshi Text",
         Aliases:   []string{"genshitext"},
         Filenames: []string{},
         MimeTypes: []string{"application/x-genshi-text", "text/x-genshi"},
     },
-    Rules{
+    genshiTextRules,
+))
+
+func genshiTextRules() Rules {
+    return Rules{
         "root": {
             {`[^#$\s]+`, Other, nil},
             {`^(\s*)(##.*)$`, ByGroups(Text, Comment), nil},
@@ -33,11 +37,11 @@ var GenshiText = internal.Register(MustNewLexer(
             {`(?<!\$)(\$\{)(.+?)(\})`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
             {`(?<!\$)(\$)([a-zA-Z_][\w.]*)`, NameVariable, nil},
         },
-    },
-))
+    }
+}
 
 // Html+Genshi lexer.
-var GenshiHTMLTemplate = internal.Register(MustNewLexer(
+var GenshiHTMLTemplate = internal.Register(MustNewLazyLexer(
     &Config{
         Name:         "Genshi HTML",
         Aliases:      []string{"html+genshi", "html+kid"},
@@ -50,7 +54,7 @@ var GenshiHTMLTemplate = internal.Register(MustNewLexer(
 ))
 
 // Genshi lexer.
-var Genshi = internal.Register(MustNewLexer(
+var Genshi = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Genshi",
         Aliases:   []string{"genshi", "kid", "xml+genshi", "xml+kid"},
@@ -62,53 +66,55 @@ var Genshi = internal.Register(MustNewLexer(
     genshiMarkupRules,
 ))
 
-var genshiMarkupRules = Rules{
+func genshiMarkupRules() Rules {
+    return Rules{
     "root": {
         {`[^<$]+`, Other, nil},
         {`(<\?python)(.*?)(\?>)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
         {`<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>`, Other, nil},
         {`<\s*py:[a-zA-Z0-9]+`, NameTag, Push("pytag")},
         {`<\s*[a-zA-Z0-9:.]+`, NameTag, Push("tag")},
         Include("variable"),
         {`[<$]`, Other, nil},
     },
     "pytag": {
         {`\s+`, Text, nil},
         {`[\w:-]+\s*=`, NameAttribute, Push("pyattr")},
         {`/?\s*>`, NameTag, Pop(1)},
     },
     "pyattr": {
         {`(")(.*?)(")`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)},
         {`(')(.*?)(')`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)},
         {`[^\s>]+`, LiteralString, Pop(1)},
     },
     "tag": {
         {`\s+`, Text, nil},
         {`py:[\w-]+\s*=`, NameAttribute, Push("pyattr")},
         {`[\w:-]+\s*=`, NameAttribute, Push("attr")},
         {`/?\s*>`, NameTag, Pop(1)},
     },
     "attr": {
         {`"`, LiteralString, Push("attr-dstring")},
         {`'`, LiteralString, Push("attr-sstring")},
         {`[^\s>]*`, LiteralString, Pop(1)},
     },
     "attr-dstring": {
         {`"`, LiteralString, Pop(1)},
         Include("strings"),
         {`'`, LiteralString, nil},
     },
     "attr-sstring": {
         {`'`, LiteralString, Pop(1)},
         Include("strings"),
         {`'`, LiteralString, nil},
     },
     "strings": {
         {`[^"'$]+`, LiteralString, nil},
         Include("variable"),
     },
     "variable": {
         {`(?<!\$)(\$\{)(.+?)(\})`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
         {`(?<!\$)(\$)([a-zA-Z_][\w\.]*)`, NameVariable, nil},
     },
-}
+    }
+}
@@ -5,23 +5,27 @@ import (
     "github.com/alecthomas/chroma/lexers/internal"
 )
 
-var stepKeywords = `^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )`
-
-var featureKeywords = `^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$`
-
-var featureElementKeywords = `^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$`
-
-var examplesKeywords = `^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$`
-
 // Gherkin lexer.
-var Gherkin = internal.Register(MustNewLexer(
+var Gherkin = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Gherkin",
         Aliases:   []string{"cucumber", "Cucumber", "gherkin", "Gherkin"},
         Filenames: []string{"*.feature", "*.FEATURE"},
         MimeTypes: []string{"text/x-gherkin"},
     },
-    Rules{
+    gherkinRules,
+))
+
+func gherkinRules() Rules {
+    stepKeywords := `^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )`
+
+    featureKeywords := `^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$`
+
+    featureElementKeywords := `^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$`
+
+    examplesKeywords := `^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$`
+
+    return Rules{
         "comments": {
             {`\s*#.*$`, Comment, nil},
         },
@@ -114,5 +118,5 @@ var Gherkin = internal.Register(MustNewLexer(
             {examplesKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("examplesTable")},
             {`(\s|.)`, NameFunction, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // GLSL lexer.
-var GLSL = internal.Register(MustNewLexer(
+var GLSL = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "GLSL",
        
 Aliases:   []string{"glsl"},
         Filenames: []string{"*.vert", "*.frag", "*.geo"},
         MimeTypes: []string{"text/x-glslsrc"},
     },
-    Rules{
+    glslRules,
+))
+
+func glslRules() Rules {
+    return Rules{
         "root": {
             {`^#.*`, CommentPreproc, nil},
             {`//.*`, CommentSingle, nil},
@@ -33,5 +37,5 @@ var GLSL = internal.Register(MustNewLexer(
             {`\.`, Punctuation, nil},
             {`\s+`, Text, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Gnuplot lexer.
-var Gnuplot = internal.Register(MustNewLexer(
+var Gnuplot = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Gnuplot",
         Aliases:   []string{"gnuplot"},
         Filenames: []string{"*.plot", "*.plt"},
         MimeTypes: []string{"text/x-gnuplot"},
     },
-    Rules{
+    gnuplotRules,
+))
+
+func gnuplotRules() Rules {
+    return Rules{
         "root": {
             Include("whitespace"),
             {`bind\b|bin\b|bi\b`, Keyword, Push("bind")},
@@ -113,5 +117,5 @@ var Gnuplot = internal.Register(MustNewLexer(
             {`functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|f\b|set\b|se\b|s\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b`, NameBuiltin, nil},
             Include("genericargs"),
         },
-    },
-))
+    }
+}
@@ -9,7 +9,7 @@ import (
 )
 
 // Go lexer.
-var Go = internal.Register(MustNewLexer(
+var Go = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Go",
         Aliases:   []string{"go", "golang"},
@@ -17,7 +17,19 @@ var Go = internal.Register(MustNewLexer(
         MimeTypes: []string{"text/x-gosrc"},
         EnsureNL:  true,
     },
-    Rules{
+    goRules,
+).SetAnalyser(func(text string) float32 {
+    if strings.Contains(text, "fmt.") && strings.Contains(text, "package ") {
+        return 0.5
+    }
+    if strings.Contains(text, "package ") {
+        return 0.1
+    }
+    return 0.0
+}))
+
+func goRules() Rules {
+    return Rules{
         "root": {
             {`\n`, Text, nil},
             {`\s+`, Text, nil},
@@ -37,8 +49,8 @@ var Go = internal.Register(MustNewLexer(
             {`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
             {`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
             {`0[0-7]+`, LiteralNumberOct, nil},
-            {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
-            {`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
+            {`0[xX][0-9a-fA-F_]+`, LiteralNumberHex, nil},
+            {`(0|[1-9][0-9_]*)`, LiteralNumberInteger, nil},
             {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
             {"(`)([^`]*)(`)", ByGroups(LiteralString, Using(TypeRemappingLexer(GoTextTemplate, TypeMapping{{Other, LiteralString, nil}})), LiteralString), nil},
             {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
@@ -47,58 +59,52 @@ var Go = internal.Register(MustNewLexer(
             {`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil},
             {`[^\W\d]\w*`, NameOther, nil},
         },
-    },
-).SetAnalyser(func(text string) float32 {
-    if strings.Contains(text, "fmt.") && strings.Contains(text, "package ") {
-        return 0.5
-    }
-    if strings.Contains(text, "package ") {
-        return 0.1
-    }
-    return 0.0
-}))
-
-var goTemplateRules = Rules{
-    "root": {
-        {`{{(- )?/\*(.|\n)*?\*/( -)?}}`, CommentMultiline, nil},
-        {`{{[-]?`, CommentPreproc, Push("template")},
-        {`[^{]+`, Other, nil},
-        {`{`, Other, nil},
-    },
-    "template": {
-        {`[-]?}}`, CommentPreproc, Pop(1)},
-        {`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline
-        {`\(`, Operator, Push("subexpression")},
-        {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
-        Include("expression"),
-    },
-    "subexpression": {
-        {`\)`, Operator, Pop(1)},
-        Include("expression"),
-    },
-    "expression": {
-        {`\s+`, Whitespace, nil},
-        {`\(`, Operator, Push("subexpression")},
-        {`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil},
-        {`\||:?=|,`, Operator, nil},
-        {`[$]?[^\W\d]\w*`, NameOther, nil},
-        {`[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil},
-        {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
-        {`-?\d+i`, LiteralNumber, nil},
-        {`-?\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
-        {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
-        {`-?\d+[Ee][-+]\d+i`, LiteralNumber, nil},
-        {`-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
-        {`-?\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
-        {`-?0[0-7]+`, LiteralNumberOct, nil},
-        {`-?0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
-        {`-?(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
-        {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
-        {"`[^`]*`", LiteralString, nil},
-    },
-}
-
-var GoHTMLTemplate = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
+    }
+}
+
+func goTemplateRules() Rules {
+    return Rules{
+        "root": {
+            {`{{(- )?/\*(.|\n)*?\*/( -)?}}`, CommentMultiline, nil},
+            {`{{[-]?`, CommentPreproc, Push("template")},
+            {`[^{]+`, Other, nil},
+            {`{`, Other, nil},
+        },
+        "template": {
+            {`[-]?}}`, CommentPreproc, Pop(1)},
+            {`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline
+            {`\(`, Operator, Push("subexpression")},
+            {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
+            Include("expression"),
+        },
+        "subexpression": {
+            {`\)`, Operator, Pop(1)},
+            Include("expression"),
+        },
+        "expression": {
+            {`\s+`, Whitespace, nil},
+            {`\(`, Operator, Push("subexpression")},
+            {`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil},
+            {`\||:?=|,`, Operator, nil},
+            {`[$]?[^\W\d]\w*`, NameOther, nil},
+            {`\$|[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil},
+            {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
+            {`-?\d+i`, LiteralNumber, nil},
+            {`-?\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
+            {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
+            {`-?\d+[Ee][-+]\d+i`, LiteralNumber, nil},
+            {`-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
+            {`-?\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
+            {`-?0[0-7]+`, LiteralNumberOct, nil},
+            {`-?0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
+            {`-?(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
+            {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
+            {"`[^`]*`", LiteralString, nil},
+        },
+    }
+}
+
+var GoHTMLTemplate = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
@@ -106,7 +112,7 @@ var GoHTMLTemplate = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
     goTemplateRules,
 )))
 
-var GoTextTemplate = internal.Register(MustNewLexer(
+var GoTextTemplate = internal.Register(MustNewLazyLexer(
     &Config{
         Name:    "Go Text Template",
         Aliases: []string{"go-text-template"},
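The Go lexer is the only one in this batch with a content analyser, now chained directly onto `MustNewLazyLexer` instead of trailing the rules; the scoring function itself is unchanged. The score feeds chroma's lexer auto-detection. A sketch of how that plays out, assuming the `lexers.Analyse` helper, which asks each registered lexer to score the text and returns the best match:

package main

import (
    "fmt"

    "github.com/alecthomas/chroma/lexers"
)

func main() {
    src := "package main\n\nimport \"fmt\"\n\nfunc main() { fmt.Println(\"hi\") }\n"

    // Each analyser returns a confidence in [0, 1]. Per the function
    // above, text containing both "package " and "fmt." scores 0.5
    // for Go, which should beat the other registered lexers here.
    if lexer := lexers.Analyse(src); lexer != nil {
        fmt.Println(lexer.Config().Name) // expected: Go
    }
}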
@@ -6,13 +6,17 @@ import (
 )
 
 // Go lexer.
-var Graphql = internal.Register(MustNewLexer(
+var Graphql = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "GraphQL",
         Aliases:   []string{"graphql", "graphqls", "gql"},
         Filenames: []string{"*.graphql", "*.graphqls"},
     },
-    Rules{
+    graphqlRules,
+))
+
+func graphqlRules() Rules {
+    return Rules{
         "root": {
             {`(query|mutation|subscription|fragment|scalar|implements|interface|union|enum|input|type)`, KeywordDeclaration, Push("type")},
             {`(on|extend|schema|directive|\.\.\.)`, KeywordDeclaration, nil},
@@ -41,5 +45,5 @@ var Graphql = internal.Register(MustNewLexer(
             {`[^\W\d]\w*`, NameClass, Pop(1)},
             Include("root"),
         },
-    },
-))
+    }
+}
@@ -6,7 +6,7 @@ import (
 )
 
 // Groovy lexer.
-var Groovy = internal.Register(MustNewLexer(
+var Groovy = internal.Register(MustNewLazyLexer(
     &Config{
         Name:    "Groovy",
         Aliases: []string{"groovy"},
@@ -14,7 +14,11 @@ var Groovy = internal.Register(MustNewLexer(
         MimeTypes: []string{"text/x-groovy"},
         DotAll:    true,
     },
-    Rules{
+    groovyRules,
+))
+
+func groovyRules() Rules {
+    return Rules{
         "root": {
             {`#!(.*?)$`, CommentPreproc, Push("base")},
             Default(Push("base")),
@@ -54,5 +58,5 @@ var Groovy = internal.Register(MustNewLexer(
         "import": {
             {`[\w.]+\*?`, NameNamespace, Pop(1)},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Handlebars lexer.
-var Handlebars = internal.Register(MustNewLexer(
+var Handlebars = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Handlebars",
-        Aliases:   []string{"handlebars"},
-        Filenames: []string{"*.handlebars"},
+        Aliases:   []string{"handlebars", "hbs"},
+        Filenames: []string{"*.handlebars", "*.hbs"},
         MimeTypes: []string{},
     },
-    Rules{
+    handlebarsRules,
+))
+
+func handlebarsRules() Rules {
+    return Rules{
         "root": {
             {`[^{]+`, Other, nil},
             {`\{\{!.*\}\}`, Comment, nil},
@@ -52,5 +56,5 @@ var Handlebars = internal.Register(MustNewLexer(
             {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
             {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Haskell lexer.
-var Haskell = internal.Register(MustNewLexer(
+var Haskell = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Haskell",
         Aliases:   []string{"haskell", "hs"},
         Filenames: []string{"*.hs"},
         MimeTypes: []string{"text/x-haskell"},
     },
-    Rules{
+    haskellRules,
+))
+
+func haskellRules() Rules {
+    return Rules{
         "root": {
             {`\s+`, Text, nil},
             {`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil},
@@ -95,5 +99,5 @@ var Haskell = internal.Register(MustNewLexer(
             {`\d+`, LiteralStringEscape, Pop(1)},
             {`\s+\\`, LiteralStringEscape, Pop(1)},
         },
-    },
-))
+    }
+}
@@ -6,7 +6,7 @@ import (
 )
 
 // Haxe lexer.
-var Haxe = internal.Register(MustNewLexer(
+var Haxe = internal.Register(MustNewLazyLexer(
     &Config{
         Name:    "Haxe",
         Aliases: []string{"hx", "haxe", "hxsl"},
@@ -14,7 +14,11 @@ var Haxe = internal.Register(MustNewLexer(
         MimeTypes: []string{"text/haxe", "text/x-haxe", "text/x-hx"},
         DotAll:    true,
     },
-    Rules{
+    haxeRules,
+))
+
+func haxeRules() Rules {
+    return Rules{
         "root": {
             Include("spaces"),
             Include("meta"),
@@ -609,8 +613,8 @@ var Haxe = internal.Register(MustNewLexer(
             {`\}`, Punctuation, Pop(1)},
             {`,`, Punctuation, Push("#pop", "object")},
         },
-    },
-))
+    }
+}
 
 func haxePreProcMutator(state *LexerState) error {
     stack, ok := state.Get("haxe-pre-proc").([][]string)
@@ -6,14 +6,18 @@ import (
 )
 
 // HCL lexer.
-var HCL = internal.Register(MustNewLexer(
+var HCL = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "HCL",
         Aliases:   []string{"hcl"},
         Filenames: []string{"*.hcl"},
         MimeTypes: []string{"application/x-hcl"},
     },
-    Rules{
+    hclRules,
+))
+
+func hclRules() Rules {
+    return Rules{
         "root": {
             Include("string"),
             Include("punctuation"),
@@ -65,5 +69,5 @@ var HCL = internal.Register(MustNewLexer(
             {`\s+`, Text, nil},
             {`\\\n`, Text, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Hexdump lexer.
-var Hexdump = internal.Register(MustNewLexer(
+var Hexdump = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Hexdump",
         Aliases:   []string{"hexdump"},
         Filenames: []string{},
         MimeTypes: []string{},
     },
-    Rules{
+    hexdumpRules,
+))
+
+func hexdumpRules() Rules {
+    return Rules{
         "root": {
             {`\n`, Text, nil},
             Include("offset"),
@@ -63,5 +67,5 @@ var Hexdump = internal.Register(MustNewLexer(
             {`\s`, Text, nil},
             {`^\*`, Punctuation, nil},
         },
-    },
-))
+    }
+}
@@ -1,4 +1,4 @@
-package lexers
+package h
 
 import (
     . "github.com/alecthomas/chroma" // nolint
@@ -6,14 +6,18 @@ import (
 )
 
 // HLB lexer.
-var HLB = internal.Register(MustNewLexer(
+var HLB = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "HLB",
         Aliases:   []string{"hlb"},
         Filenames: []string{"*.hlb"},
         MimeTypes: []string{},
     },
-    Rules{
+    hlbRules,
+))
+
+func hlbRules() Rules {
+    return Rules{
         "root": {
             {`(#.*)`, ByGroups(CommentSingle), nil},
             {`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
@@ -50,5 +54,5 @@ var HLB = internal.Register(MustNewLexer(
             {`(\n|\r|\r\n)`, Text, nil},
             {`.`, Text, nil},
         },
-    },
-))
+    }
+}
@@ -8,7 +8,7 @@ import (
 )
 
 // HTML lexer.
-var HTML = internal.Register(MustNewLexer(
+var HTML = internal.Register(MustNewLazyLexer(
     &Config{
         Name:    "HTML",
         Aliases: []string{"html"},
@@ -18,7 +18,11 @@ var HTML = internal.Register(MustNewLexer(
         DotAll:          true,
         CaseInsensitive: true,
     },
-    Rules{
+    htmlRules,
+))
+
+func htmlRules() Rules {
+    return Rules{
         "root": {
             {`[^<&]+`, Text, nil},
             {`&\S*?;`, NameEntity, nil},
@@ -55,5 +59,5 @@ var HTML = internal.Register(MustNewLexer(
             {`'.*?'`, LiteralString, Pop(1)},
             {`[^\s>]+`, LiteralString, Pop(1)},
         },
-    },
-))
+    }
+}
@@ -8,7 +8,7 @@ import (
 )
 
 // HTTP lexer.
-var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
+var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLazyLexer(
     &Config{
         Name:    "HTTP",
         Aliases: []string{"http"},
@@ -17,7 +17,11 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
         NotMultiline: true,
         DotAll:       true,
     },
-    Rules{
+    httpRules,
+)))
+
+func httpRules() Rules {
+    return Rules{
         "root": {
             {`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
             {`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
@@ -30,8 +34,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
         "content": {
             {`.+`, EmitterFunc(httpContentBlock), nil},
         },
-    },
-)))
+    }
+}
 
 func httpContentBlock(groups []string, lexer Lexer) Iterator {
     tokens := []Token{
@@ -6,14 +6,18 @@ import (
 )
 
 // Hy lexer.
-var Hy = internal.Register(MustNewLexer(
+var Hy = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Hy",
         Aliases:   []string{"hylang"},
         Filenames: []string{"*.hy"},
         MimeTypes: []string{"text/x-hy", "application/x-hy"},
     },
-    Rules{
+    hyRules,
+))
+
+func hyRules() Rules {
+    return Rules{
         "root": {
             {`;.*$`, CommentSingle, nil},
             {`[,\s]+`, Text, nil},
@@ -47,5 +51,5 @@ var Hy = internal.Register(MustNewLexer(
             {`(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls)\b`, NameBuiltinPseudo, nil},
             {Words(`(?<!\.)`, `\b`, `ArithmeticError`, `AssertionError`, `AttributeError`, `BaseException`, `DeprecationWarning`, `EOFError`, `EnvironmentError`, `Exception`, `FloatingPointError`, `FutureWarning`, `GeneratorExit`, `IOError`, `ImportError`, `ImportWarning`, `IndentationError`, `IndexError`, `KeyError`, `KeyboardInterrupt`, `LookupError`, `MemoryError`, `NameError`, `NotImplemented`, `NotImplementedError`, `OSError`, `OverflowError`, `OverflowWarning`, `PendingDeprecationWarning`, `ReferenceError`, `RuntimeError`, `RuntimeWarning`, `StandardError`, `StopIteration`, `SyntaxError`, `SyntaxWarning`, `SystemError`, `SystemExit`, `TabError`, `TypeError`, `UnboundLocalError`, `UnicodeDecodeError`, `UnicodeEncodeError`, `UnicodeError`, `UnicodeTranslateError`, `UnicodeWarning`, `UserWarning`, `ValueError`, `VMSError`, `Warning`, `WindowsError`, `ZeroDivisionError`), NameException, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Idris lexer.
-var Idris = internal.Register(MustNewLexer(
+var Idris = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Idris",
         Aliases:   []string{"idris", "idr"},
         Filenames: []string{"*.idr"},
         MimeTypes: []string{"text/x-idris"},
     },
-    Rules{
+    idrisRules,
+))
+
+func idrisRules() Rules {
+    return Rules{
         "root": {
             {`^(\s*)(%lib|link|flag|include|hide|freeze|access|default|logging|dynamic|name|error_handlers|language)`, ByGroups(Text, KeywordReserved), nil},
             {`(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$`, ByGroups(Text, CommentSingle), nil},
@@ -76,5 +80,5 @@ var Idris = internal.Register(MustNewLexer(
             {`\d+`, LiteralStringEscape, Pop(1)},
             {`\s+\\`, LiteralStringEscape, Pop(1)},
         },
-    },
-))
+    }
+}
@@ -6,7 +6,7 @@ import (
 )
 
 // Igor lexer.
-var Igor = internal.Register(MustNewLexer(
+var Igor = internal.Register(MustNewLazyLexer(
     &Config{
         Name:    "Igor",
         Aliases: []string{"igor", "igorpro"},
@@ -14,7 +14,11 @@ var Igor = internal.Register(MustNewLexer(
         MimeTypes:       []string{"text/ipf"},
         CaseInsensitive: true,
     },
-    Rules{
+    igorRules,
+))
+
+func igorRules() Rules {
+    return Rules{
         "root": {
             {`//.*$`, CommentSingle, nil},
             {`"([^"\\]|\\.)*"`, LiteralString, nil},
@@ -28,5 +32,5 @@ var Igor = internal.Register(MustNewLexer(
             {`.`, Text, nil},
             {`\n|\r`, Text, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Ini lexer.
-var Ini = internal.Register(MustNewLexer(
+var Ini = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "INI",
         Aliases:   []string{"ini", "cfg", "dosini"},
         Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"},
         MimeTypes: []string{"text/x-ini", "text/inf"},
     },
-    Rules{
+    iniRules,
+))
+
+func iniRules() Rules {
+    return Rules{
         "root": {
             {`\s+`, Text, nil},
             {`[;#].*`, CommentSingle, nil},
@@ -21,5 +25,5 @@ var Ini = internal.Register(MustNewLexer(
             {`(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Text, Operator, Text, LiteralString), nil},
             {`(.+?)$`, NameAttribute, nil},
         },
-    },
-))
+    }
+}
@@ -6,14 +6,18 @@ import (
 )
 
 // Io lexer.
-var Io = internal.Register(MustNewLexer(
+var Io = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Io",
         Aliases:   []string{"io"},
         Filenames: []string{"*.io"},
         MimeTypes: []string{"text/x-iosrc"},
     },
-    Rules{
+    ioRules,
+))
+
+func ioRules() Rules {
+    return Rules{
         "root": {
             {`\n`, Text, nil},
             {`\s+`, Text, nil},
@@ -36,5 +40,5 @@ var Io = internal.Register(MustNewLexer(
             {`\+/`, CommentMultiline, Pop(1)},
             {`[+/]`, CommentMultiline, nil},
         },
-    },
-))
+    }
+}
vendor/github.com/alecthomas/chroma/lexers/internal/api.go
@@ -146,16 +146,19 @@ func Register(lexer chroma.Lexer) chroma.Lexer {
     return lexer
 }
 
-// Used for the fallback lexer as well as the explicit plaintext lexer
-var PlaintextRules = chroma.Rules{
-    "root": []chroma.Rule{
-        {`.+`, chroma.Text, nil},
-        {`\n`, chroma.Text, nil},
-    },
+// PlaintextRules is used for the fallback lexer as well as the explicit
+// plaintext lexer.
+func PlaintextRules() chroma.Rules {
+    return chroma.Rules{
+        "root": []chroma.Rule{
+            {`.+`, chroma.Text, nil},
+            {`\n`, chroma.Text, nil},
+        },
+    }
 }
 
 // Fallback lexer if no other is found.
-var Fallback chroma.Lexer = chroma.MustNewLexer(&chroma.Config{
+var Fallback chroma.Lexer = chroma.MustNewLazyLexer(&chroma.Config{
     Name:      "fallback",
     Filenames: []string{"*"},
 }, PlaintextRules)

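The internal API changes to match: PlaintextRules becomes a function so that even the fallback lexer defers building its trivial rule map until first use. A hedged usage sketch against chroma's public lexers package; the unknown-language lookup below is illustrative:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Get returns nil for an unknown language; callers such as cheat then
	// fall back to the plaintext Fallback lexer, whose rules are now built
	// lazily by PlaintextRules().
	lexer := lexers.Get("no-such-language")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	fmt.Println(lexer.Config().Name) // prints: fallback
}
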
vendor/github.com/alecthomas/chroma/lexers/j/j.go
@@ -6,14 +6,18 @@ import (
 )
 
 // J lexer.
-var J = internal.Register(MustNewLexer(
+var J = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "J",
         Aliases:   []string{"j"},
         Filenames: []string{"*.ijs"},
         MimeTypes: []string{"text/x-j"},
     },
-    Rules{
+    jRules,
+))
+
+func jRules() Rules {
+    return Rules{
         "root": {
             {`#!.*$`, CommentPreproc, nil},
             {`NB\..*`, CommentSingle, nil},
@@ -69,5 +73,5 @@ var J = internal.Register(MustNewLexer(
             {`''`, LiteralString, nil},
             {`'`, LiteralString, Pop(1)},
         },
-    },
-))
+    }
+}

vendor/github.com/alecthomas/chroma/lexers/j/java.go
@@ -6,15 +6,20 @@ import (
 )
 
 // Java lexer.
-var Java = internal.Register(MustNewLexer(
+var Java = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Java",
         Aliases:   []string{"java"},
         Filenames: []string{"*.java"},
         MimeTypes: []string{"text/x-java"},
         DotAll:    true,
+        EnsureNL:  true,
     },
-    Rules{
+    javaRules,
+))
+
+func javaRules() Rules {
+    return Rules{
         "root": {
             {`[^\S\n]+`, Text, nil},
             {`//.*?\n`, CommentSingle, nil},
@@ -47,5 +52,5 @@ var Java = internal.Register(MustNewLexer(
         "import": {
             {`[\w.]+\*?`, NameNamespace, Pop(1)},
         },
-    },
-))
+    }
+}

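Java's Config also gains EnsureNL: true. EnsureNL makes the tokeniser append a trailing newline to input that lacks one, which matters for newline-terminated rules such as the `//.*?\n` single-line comment pattern above. A small illustrative sketch, assuming the vendored chroma v0.9 API; the Java snippet text is made up:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	java := lexers.Get("java")
	// No final newline here; with EnsureNL: true the `//.*?\n` rule can
	// still consume the comment as CommentSingle.
	it, err := java.Tokenise(nil, "int x = 1; // trailing comment")
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-20s %q\n", tok.Type, tok.Value)
	}
}
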
vendor/github.com/alecthomas/chroma/lexers/j/javascript.go
@@ -26,11 +26,11 @@ var JavascriptRules = Rules{
         {`\A#! ?/.*?\n`, CommentHashbang, nil},
         {`^(?=\s|/|<!--)`, Text, Push("slashstartsregex")},
         Include("commentsandwhitespace"),
-        {`(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
+        {`\d+(\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
         {`0[bB][01]+`, LiteralNumberBin, nil},
         {`0[oO][0-7]+`, LiteralNumberOct, nil},
         {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
-        {`[0-9]+`, LiteralNumberInteger, nil},
+        {`[0-9_]+`, LiteralNumberInteger, nil},
         {`\.\.\.|=>`, Punctuation, nil},
         {`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
         {`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
@@ -65,7 +65,7 @@ var Javascript = internal.Register(MustNewLexer(
     &Config{
         Name:      "JavaScript",
         Aliases:   []string{"js", "javascript"},
-        Filenames: []string{"*.js", "*.jsm"},
+        Filenames: []string{"*.js", "*.jsm", "*.mjs"},
         MimeTypes: []string{"application/javascript", "application/x-javascript", "text/x-javascript", "text/javascript"},
         DotAll:    true,
         EnsureNL:  true,

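Two substantive rule changes land in the JavaScript lexer alongside the *.mjs filename glob: the float pattern is rewritten, and the integer pattern widens from `[0-9]+` to `[0-9_]+`, which picks up numeric-separator literals. A standalone check of the separator change using Go's regexp package directly (demo code, not chroma):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	oldInt := regexp.MustCompile(`^[0-9]+$`)  // previous rule body, anchored for the demo
	newInt := regexp.MustCompile(`^[0-9_]+$`) // updated rule body, anchored for the demo
	fmt.Println(oldInt.MatchString("1_000_000")) // false
	fmt.Println(newInt.MatchString("1_000_000")) // true
}
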
vendor/github.com/alecthomas/chroma/lexers/j/json.go
@@ -6,7 +6,7 @@ import (
 )
 
 // JSON lexer.
-var JSON = internal.Register(MustNewLexer(
+var JSON = internal.Register(MustNewLazyLexer(
     &Config{
         Name:         "JSON",
         Aliases:      []string{"json"},
@@ -15,7 +15,11 @@ var JSON = internal.Register(MustNewLexer(
         NotMultiline: true,
         DotAll:       true,
     },
-    Rules{
+    jsonRules,
+))
+
+func jsonRules() Rules {
+    return Rules{
         "whitespace": {
             {`\s+`, Text, nil},
         },
@@ -51,5 +55,5 @@ var JSON = internal.Register(MustNewLexer(
         "root": {
             Include("value"),
         },
-    },
-))
+    }
+}

vendor/github.com/alecthomas/chroma/lexers/j/jsx.go
@@ -8,7 +8,7 @@ import (
 // JSX lexer.
 //
 // This was generated from https://github.com/fcurella/jsx-lexer
-var JSX = internal.Register(MustNewLexer(
+var JSX = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "react",
         Aliases:   []string{"jsx", "react"},
@@ -16,7 +16,11 @@ var JSX = internal.Register(MustNewLexer(
         MimeTypes: []string{"text/jsx", "text/typescript-jsx"},
         DotAll:    true,
     },
-    Rules{
+    jsxRules,
+))
+
+func jsxRules() Rules {
+    return Rules{
         "commentsandwhitespace": {
             {`\s+`, Text, nil},
             {`<!--`, Comment, nil},
@@ -91,5 +95,5 @@ var JSX = internal.Register(MustNewLexer(
             {`}`, Punctuation, Pop(1)},
             Include("root"),
         },
-    },
-))
+    }
+}

File diff suppressed because one or more lines are too long
vendor/github.com/alecthomas/chroma/lexers/j/jungle.go
@@ -5,14 +5,18 @@ import (
     "github.com/alecthomas/chroma/lexers/internal"
 )
 
-var Jungle = internal.Register(MustNewLexer(
+var Jungle = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Jungle",
         Aliases:   []string{"jungle"},
         Filenames: []string{"*.jungle"},
         MimeTypes: []string{"text/x-jungle"},
     },
-    Rules{
+    jungleRules,
+))
+
+func jungleRules() Rules {
+    return Rules{
         "root": {
             {`[^\S\n]+`, Text, nil},
             {`\n`, Text, nil},
@@ -46,5 +50,5 @@ var Jungle = internal.Register(MustNewLexer(
             {`[a-zA-Z_]\w*`, Name, nil},
             Default(Pop(1)),
         },
-    },
-))
+    }
+}

File diff suppressed because one or more lines are too long
vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go
@@ -6,14 +6,18 @@ import (
 )
 
 // Lighttpd Configuration File lexer.
-var Lighttpd = internal.Register(MustNewLexer(
+var Lighttpd = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "Lighttpd configuration file",
         Aliases:   []string{"lighty", "lighttpd"},
         Filenames: []string{},
         MimeTypes: []string{"text/x-lighttpd-conf"},
     },
-    Rules{
+    lighttpdRules,
+))
+
+func lighttpdRules() Rules {
+    return Rules{
         "root": {
             {`#.*\n`, CommentSingle, nil},
             {`/\S*`, Name, nil},
@@ -26,5 +30,5 @@ var Lighttpd = internal.Register(MustNewLexer(
             {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil},
             {`\s+`, Text, nil},
         },
-    },
-))
+    }
+}

vendor/github.com/alecthomas/chroma/lexers/l/llvm.go
@@ -6,14 +6,18 @@ import (
 )
 
 // Llvm lexer.
-var Llvm = internal.Register(MustNewLexer(
+var Llvm = internal.Register(MustNewLazyLexer(
     &Config{
         Name:      "LLVM",
         Aliases:   []string{"llvm"},
         Filenames: []string{"*.ll"},
         MimeTypes: []string{"text/x-llvm"},
     },
-    Rules{
+    llvmRules,
+))
+
+func llvmRules() Rules {
+    return Rules{
         "root": {
             Include("whitespace"),
             {`([-a-zA-Z$._][\w\-$.]*|"[^"]*?")\s*:`, NameLabel, nil},
@@ -39,5 +43,5 @@ var Llvm = internal.Register(MustNewLexer(
             {Words(``, ``, `void`, `half`, `float`, `double`, `x86_fp80`, `fp128`, `ppc_fp128`, `label`, `metadata`, `token`), KeywordType, nil},
             {`i[1-9]\d*`, Keyword, nil},
         },
-    },
-))
+    }
+}

Some files were not shown because too many files have changed in this diff.