Compare commits

..

1 Commits
4.2.1 ... 4.0.4

Author SHA1 Message Date
4250b854c9 chore: bump version to 4.0.4
Create release containing typo fixes (#580).
2020-08-23 15:26:36 -04:00
555 changed files with 9800 additions and 23629 deletions

View File

@ -1,66 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
# ******** NOTE ********
name: "CodeQL"
on:
push:
branches: [ master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '45 23 * * 0'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
language: [ 'go' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more...
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
#- name: Autobuild
#uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
- run: make
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1

View File

@ -1,8 +0,0 @@
# NB: this image isn't used anywhere in the build pipeline. It exists to
# conveniently facilitate ad-hoc experimentation in a sandboxed environment
# during development.
FROM golang:1.15-alpine
RUN apk add git less make
WORKDIR /app

View File

@ -7,7 +7,6 @@ dist_dir := ./dist
CAT := cat CAT := cat
COLUMN := column COLUMN := column
CTAGS := ctags CTAGS := ctags
DOCKER := docker
GO := go GO := go
GREP := grep GREP := grep
GZIP := gzip --best GZIP := gzip --best
@ -21,8 +20,6 @@ SED := sed
SORT := sort SORT := sort
ZIP := zip -m ZIP := zip -m
docker_image := cheat-devel:latest
# build flags # build flags
BUILD_FLAGS := -ldflags="-s -w" -mod vendor -trimpath BUILD_FLAGS := -ldflags="-s -w" -mod vendor -trimpath
GOBIN := GOBIN :=
@ -108,7 +105,6 @@ clean: $(dist_dir)
.PHONY: distclean .PHONY: distclean
distclean: distclean:
$(RM) -f tags $(RM) -f tags
@$(DOCKER) image rm -f $(docker_image)
## setup: install revive (linter) and scc (sloc tool) ## setup: install revive (linter) and scc (sloc tool)
.PHONY: setup .PHONY: setup
@ -136,10 +132,6 @@ man:
vendor: vendor:
$(GO) mod vendor && $(GO) mod tidy && $(GO) mod verify $(GO) mod vendor && $(GO) mod tidy && $(GO) mod verify
## vendor-update: update vendored dependencies
vendor-update:
$(GO) get -t -u ./... && $(GO) mod vendor
## fmt: run go fmt ## fmt: run go fmt
.PHONY: fmt .PHONY: fmt
fmt: fmt:
@ -173,16 +165,6 @@ check: | vendor fmt lint vet test
.PHONY: prepare .PHONY: prepare
prepare: | $(dist_dir) clean generate vendor fmt lint vet test prepare: | $(dist_dir) clean generate vendor fmt lint vet test
## docker-setup: create a docker image for use during development
.PHONY: docker-setup
docker-setup:
$(DOCKER) build -t $(docker_image) -f Dockerfile .
## docker-sh: shell into the docker development container
.PHONY: docker-sh
docker-sh:
$(DOCKER) run -v $(shell pwd):/app -ti $(docker_image) /bin/ash
## help: display this help text ## help: display this help text
.PHONY: help .PHONY: help
help: help:

View File

@ -47,17 +47,17 @@ Installing
`cheat` has no dependencies. To install it, download the executable from the `cheat` has no dependencies. To install it, download the executable from the
[releases][] page and place it on your `PATH`. [releases][] page and place it on your `PATH`.
Alternatively, if you have [go][] installed, you may install `cheat` using `go
get`:
```sh
go get -u github.com/cheat/cheat/cmd/cheat
```
Configuring Configuring
----------- -----------
### conf.yml ### ### conf.yml ###
`cheat` is configured by a YAML file that will be auto-generated on first run. `cheat` is configured by a YAML file that will be auto-generated on first run.
Should you need to create a config file manually, you can do
so via:
```sh
mkdir -p ~/.config/cheat && cheat --init > ~/.config/cheat/conf.yml
```
By default, the config file is assumed to exist on an XDG-compliant By default, the config file is assumed to exist on an XDG-compliant
configuration path like `~/.config/cheat/conf.yml`. If you would like to store configuration path like `~/.config/cheat/conf.yml`. If you would like to store
@ -211,4 +211,3 @@ Additionally, `cheat` supports enhanced autocompletion via integration with
[cheatsheets]: https://github.com/cheat/cheatsheets [cheatsheets]: https://github.com/cheat/cheatsheets
[completions]: https://github.com/cheat/cheat/tree/master/scripts [completions]: https://github.com/cheat/cheat/tree/master/scripts
[fzf]: https://github.com/junegunn/fzf [fzf]: https://github.com/junegunn/fzf
[go]: https://golang.org

View File

@ -27,5 +27,5 @@ func cmdDirectories(opts map[string]interface{}, conf config.Config) {
// write columnized output to stdout // write columnized output to stdout
w.Flush() w.Flush()
display.Write(out.String(), conf) display.Display(out.String(), conf)
} }

View File

@ -37,8 +37,8 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
// sheets with local sheets), here we simply want to create a slice // sheets with local sheets), here we simply want to create a slice
// containing all sheets. // containing all sheets.
flattened := []sheet.Sheet{} flattened := []sheet.Sheet{}
for _, pathsheets := range cheatsheets { for _, pathSheets := range cheatsheets {
for _, s := range pathsheets { for _, s := range pathSheets {
flattened = append(flattened, s) flattened = append(flattened, s)
} }
} }
@ -105,5 +105,5 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
// write columnized output to stdout // write columnized output to stdout
w.Flush() w.Flush()
display.Write(out.String(), conf) display.Display(out.String(), conf)
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/cheat/cheat/internal/config" "github.com/cheat/cheat/internal/config"
"github.com/cheat/cheat/internal/display" "github.com/cheat/cheat/internal/display"
"github.com/cheat/cheat/internal/sheet"
"github.com/cheat/cheat/internal/sheets" "github.com/cheat/cheat/internal/sheets"
) )
@ -31,65 +32,71 @@ func cmdSearch(opts map[string]interface{}, conf config.Config) {
) )
} }
// iterate over each cheatpath // consolidate the cheatsheets found on all paths into a single map of
out := "" // `title` => `sheet` (ie, allow more local cheatsheets to override less
for _, pathcheats := range cheatsheets { // local cheatsheets)
consolidated := sheets.Consolidate(cheatsheets)
// sort the cheatsheets alphabetically, and search for matches // if <cheatsheet> was provided, search that single sheet only
for _, sheet := range sheets.Sort(pathcheats) { if opts["<cheatsheet>"] != nil {
// if <cheatsheet> was provided, constrain the search only to cheatsheet := opts["<cheatsheet>"].(string)
// matching cheatsheets
if opts["<cheatsheet>"] != nil && sheet.Title != opts["<cheatsheet>"] {
continue
}
// assume that we want to perform a case-insensitive search for <phrase> // assert that the cheatsheet exists
pattern := "(?i)" + phrase s, ok := consolidated[cheatsheet]
if !ok {
fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
os.Exit(2)
}
// unless --regex is provided, in which case we pass the regex unaltered consolidated = map[string]sheet.Sheet{
if opts["--regex"] == true { cheatsheet: s,
pattern = phrase
}
// compile the regex
reg, err := regexp.Compile(pattern)
if err != nil {
fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to compile regexp: %s, %v", pattern, err))
os.Exit(1)
}
// `Search` will return text entries that match the search terms. We're
// using it here to overwrite the prior cheatsheet Text, filtering it to
// only what is relevant
sheet.Text = sheet.Search(reg)
// if the sheet did not match the search, ignore it and move on
if sheet.Text == "" {
continue
}
// if colorization was requested, apply it here
if conf.Color(opts) {
sheet.Colorize(conf)
}
// display the cheatsheet title and path
out += fmt.Sprintf("%s %s\n",
display.Underline(sheet.Title),
display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
)
// indent each line of content
out += display.Indent(sheet.Text) + "\n"
} }
} }
// trim superfluous newlines // sort the cheatsheets alphabetically, and search for matches
out = strings.TrimSpace(out) out := ""
for _, sheet := range sheets.Sort(consolidated) {
// assume that we want to perform a case-insensitive search for <phrase>
pattern := "(?i)" + phrase
// unless --regex is provided, in which case we pass the regex unaltered
if opts["--regex"] == true {
pattern = phrase
}
// compile the regex
reg, err := regexp.Compile(pattern)
if err != nil {
fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to compile regexp: %s, %v", pattern, err))
os.Exit(1)
}
// `Search` will return text entries that match the search terms. We're
// using it here to overwrite the prior cheatsheet Text, filtering it to
// only what is relevant
sheet.Text = sheet.Search(reg)
// if the sheet did not match the search, ignore it and move on
if sheet.Text == "" {
continue
}
// if colorization was requested, apply it here
if conf.Color(opts) {
sheet.Colorize(conf)
}
// output the cheatsheet title
out += fmt.Sprintf("%s:\n", sheet.Title)
// indent each line of content with two spaces
for _, line := range strings.Split(sheet.Text, "\n") {
out += fmt.Sprintf(" %s\n", line)
}
}
// display the output // display the output
// NB: resist the temptation to call `display.Display` multiple times in display.Display(out, conf)
// the loop above. That will not play nicely with the paginator.
display.Write(out, conf)
} }

View File

@ -26,5 +26,5 @@ func cmdTags(opts map[string]interface{}, conf config.Config) {
} }
// display the output // display the output
display.Write(out, conf) display.Display(out, conf)
} }

View File

@ -30,39 +30,9 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
) )
} }
// if --all was passed, display cheatsheets from all cheatpaths // consolidate the cheatsheets found on all paths into a single map of
if opts["--all"].(bool) { // `title` => `sheet` (ie, allow more local cheatsheets to override less
// iterate over the cheatpaths // local cheatsheets)
out := ""
for _, cheatpath := range cheatsheets {
// if the cheatpath contains the specified cheatsheet, display it
if sheet, ok := cheatpath[cheatsheet]; ok {
// identify the matching cheatsheet
out += fmt.Sprintf("%s %s\n",
display.Underline(sheet.Title),
display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
)
// apply colorization if requested
if conf.Color(opts) {
sheet.Colorize(conf)
}
// display the cheatsheet
out += display.Indent(sheet.Text) + "\n"
}
}
// display and exit
display.Write(strings.TrimSuffix(out, "\n"), conf)
os.Exit(0)
}
// otherwise, consolidate the cheatsheets found on all paths into a single
// map of `title` => `sheet` (ie, allow more local cheatsheets to override
// less local cheatsheets)
consolidated := sheets.Consolidate(cheatsheets) consolidated := sheets.Consolidate(cheatsheets)
// fail early if the requested cheatsheet does not exist // fail early if the requested cheatsheet does not exist
@ -78,5 +48,5 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
} }
// display the cheatsheet // display the cheatsheet
display.Write(sheet.Text, conf) display.Display(sheet.Text, conf)
} }

View File

@ -3,12 +3,11 @@ Usage:
Options: Options:
--init Write a default config file to stdout --init Write a default config file to stdout
-a --all Search among all cheatpaths
-c --colorize Colorize output -c --colorize Colorize output
-d --directories List cheatsheet directories -d --directories List cheatsheet directories
-e --edit=<cheatsheet> Edit <cheatsheet> -e --edit=<cheatsheet> Edit <cheatsheet>
-l --list List cheatsheets -l --list List cheatsheets
-p --path=<name> Return only sheets found on cheatpath <name> -p --path=<name> Return only sheets found on path <name>
-r --regex Treat search <phrase> as a regex -r --regex Treat search <phrase> as a regex
-s --search=<phrase> Search cheatsheets for <phrase> -s --search=<phrase> Search cheatsheets for <phrase>
-t --tag=<tag> Return only sheets matching <tag> -t --tag=<tag> Return only sheets matching <tag>

View File

@ -5,6 +5,7 @@ package main
import ( import (
"fmt" "fmt"
"os" "os"
"path"
"runtime" "runtime"
"strings" "strings"
@ -16,7 +17,7 @@ import (
"github.com/cheat/cheat/internal/installer" "github.com/cheat/cheat/internal/installer"
) )
const version = "4.2.1" const version = "4.0.4"
func main() { func main() {
@ -73,16 +74,62 @@ func main() {
os.Exit(0) os.Exit(0)
} }
// choose a confpath // read the config template
confpath = confpaths[0] configs := configs()
// run the installer // determine the appropriate paths for config data and (optional) community
if err := installer.Run(configs(), confpath); err != nil { // cheatsheets based on the user's platform
fmt.Fprintf(os.Stderr, "failed to run installer: %v\n", err) confpath = confpaths[0]
confdir := path.Dir(confpath)
// create paths for community and personal cheatsheets
community := path.Join(confdir, "/cheatsheets/community")
personal := path.Join(confdir, "/cheatsheets/personal")
// template the above paths into the default configs
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
// prompt the user to download the community cheatsheets
yes, err = installer.Prompt(
"Would you like to download the community cheatsheets? [Y/n]",
true,
)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
os.Exit(1)
}
// clone the community cheatsheets if so instructed
if yes {
// clone the community cheatsheets
if err := installer.Clone(community); err != nil {
fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
os.Exit(1)
}
// also create a directory for personal cheatsheets
if err := os.MkdirAll(personal, os.ModePerm); err != nil {
fmt.Fprintf(
os.Stderr,
"failed to create config: failed to create directory: %s: %v\n",
personal,
err)
os.Exit(1)
}
}
// the config file does not exist, so we'll try to create one
if err = config.Init(confpath, configs); err != nil {
fmt.Fprintf(
os.Stderr,
"failed to create config file: %s: %v\n",
confpath,
err,
)
os.Exit(1) os.Exit(1)
} }
// notify the user and exit
fmt.Printf("Created config file: %s\n", confpath) fmt.Printf("Created config file: %s\n", confpath)
fmt.Println("Please read this file for advanced configuration information.") fmt.Println("Please read this file for advanced configuration information.")
os.Exit(0) os.Exit(0)
@ -138,9 +185,6 @@ func main() {
case opts["<cheatsheet>"] != nil: case opts["<cheatsheet>"] != nil:
cmd = cmdView cmd = cmdView
case opts["--tag"] != nil && opts["--tag"].(string) != "":
cmd = cmdList
default: default:
fmt.Println(usage()) fmt.Println(usage())
os.Exit(0) os.Exit(0)

View File

@ -12,12 +12,11 @@ func usage() string {
Options: Options:
--init Write a default config file to stdout --init Write a default config file to stdout
-a --all Search among all cheatpaths
-c --colorize Colorize output -c --colorize Colorize output
-d --directories List cheatsheet directories -d --directories List cheatsheet directories
-e --edit=<cheatsheet> Edit <cheatsheet> -e --edit=<cheatsheet> Edit <cheatsheet>
-l --list List cheatsheets -l --list List cheatsheets
-p --path=<name> Return only sheets found on cheatpath <name> -p --path=<name> Return only sheets found on path <name>
-r --regex Treat search <phrase> as a regex -r --regex Treat search <phrase> as a regex
-s --search=<phrase> Search cheatsheets for <phrase> -s --search=<phrase> Search cheatsheets for <phrase>
-t --tag=<tag> Return only sheets matching <tag> -t --tag=<tag> Return only sheets matching <tag>

View File

@ -1,4 +1,4 @@
.\" Automatically generated by Pandoc 2.2.1 .\" Automatically generated by Pandoc 1.17.2
.\" .\"
.TH "CHEAT" "1" "" "" "General Commands Manual" .TH "CHEAT" "1" "" "" "General Commands Manual"
.hy .hy
@ -17,62 +17,62 @@ commands that they use frequently, but not frequently enough to
remember. remember.
.SH OPTIONS .SH OPTIONS
.TP .TP
.B \[en]init .B \-\-init
Print a config file to stdout. Print a config file to stdout.
.RS .RS
.RE .RE
.TP .TP
.B \-c, \[en]colorize .B \-c, \-\-colorize
Colorize output. Colorize output.
.RS .RS
.RE .RE
.TP .TP
.B \-d, \[en]directories .B \-d, \-\-directories
List cheatsheet directories. List cheatsheet directories.
.RS .RS
.RE .RE
.TP .TP
.B \-e, \[en]edit=\f[I]CHEATSHEET\f[] .B \-e, \-\-edit=\f[I]CHEATSHEET\f[]
Open \f[I]CHEATSHEET\f[] for editing. Open \f[I]CHEATSHEET\f[] for editing.
.RS .RS
.RE .RE
.TP .TP
.B \-l, \[en]list .B \-l, \-\-list
List available cheatsheets. List available cheatsheets.
.RS .RS
.RE .RE
.TP .TP
.B \-p, \[en]path=\f[I]PATH\f[] .B \-p, \-\-path=\f[I]PATH\f[]
Filter only to sheets found on path \f[I]PATH\f[]. Filter only to sheets found on path \f[I]PATH\f[].
.RS .RS
.RE .RE
.TP .TP
.B \-r, \[en]regex .B \-r, \-\-regex
Treat search \f[I]PHRASE\f[] as a regular expression. Treat search \f[I]PHRASE\f[] as a regular expression.
.RS .RS
.RE .RE
.TP .TP
.B \-s, \[en]search=\f[I]PHRASE\f[] .B \-s, \-\-search=\f[I]PHRASE\f[]
Search cheatsheets for \f[I]PHRASE\f[]. Search cheatsheets for \f[I]PHRASE\f[].
.RS .RS
.RE .RE
.TP .TP
.B \-t, \[en]tag=\f[I]TAG\f[] .B \-t, \-\-tag=\f[I]TAG\f[]
Filter only to sheets tagged with \f[I]TAG\f[]. Filter only to sheets tagged with \f[I]TAG\f[].
.RS .RS
.RE .RE
.TP .TP
.B \-T, \[en]tags .B \-T, \-\-tags
List all tags in use. List all tags in use.
.RS .RS
.RE .RE
.TP .TP
.B \-v, \[en]version .B \-v, \-\-version
Print the version number. Print the version number.
.RS .RS
.RE .RE
.TP .TP
.B \[en]rm=\f[I]CHEATSHEET\f[] .B \-\-rm=\f[I]CHEATSHEET\f[]
Remove (deletes) \f[I]CHEATSHEET\f[]. Remove (deletes) \f[I]CHEATSHEET\f[].
.RS .RS
.RE .RE
@ -88,7 +88,7 @@ cheat \-e \f[I]foo\f[]
.RS .RS
.RE .RE
.TP .TP
.B To edit (or create) the foo/bar cheatsheet on the `work' cheatpath: .B To edit (or create) the foo/bar cheatsheet on the \[aq]work\[aq] cheatpath:
cheat \-p \f[I]work\f[] \-e \f[I]foo/bar\f[] cheat \-p \f[I]work\f[] \-e \f[I]foo/bar\f[]
.RS .RS
.RE .RE
@ -103,7 +103,7 @@ cheat \-l
.RS .RS
.RE .RE
.TP .TP
.B To list all cheatsheets whose titles match `apt': .B To list all cheatsheets whose titles match \[aq]apt\[aq]:
cheat \-l \f[I]apt\f[] cheat \-l \f[I]apt\f[]
.RS .RS
.RE .RE
@ -113,23 +113,23 @@ cheat \-T
.RS .RS
.RE .RE
.TP .TP
.B To list available cheatsheets that are tagged as `personal': .B To list available cheatsheets that are tagged as \[aq]personal\[aq]:
cheat \-l \-t \f[I]personal\f[] cheat \-l \-t \f[I]personal\f[]
.RS .RS
.RE .RE
.TP .TP
.B To search for `ssh' among all cheatsheets, and colorize matches: .B To search for \[aq]ssh\[aq] among all cheatsheets, and colorize matches:
cheat \-c \-s \f[I]ssh\f[] cheat \-c \-s \f[I]ssh\f[]
.RS .RS
.RE .RE
.TP .TP
.B To search (by regex) for cheatsheets that contain an IP address: .B To search (by regex) for cheatsheets that contain an IP address:
cheat \-c \-r \-s \f[I]`(?:[0\-9]{1,3}.){3}[0\-9]{1,3}'\f[] cheat \-c \-r \-s \f[I]\[aq](?:[0\-9]{1,3}.){3}[0\-9]{1,3}\[aq]\f[]
.RS .RS
.RE .RE
.TP .TP
.B To remove (delete) the foo/bar cheatsheet: .B To remove (delete) the foo/bar cheatsheet:
cheat \[en]rm \f[I]foo/bar\f[] cheat \-\-rm \f[I]foo/bar\f[]
.RS .RS
.RE .RE
.SH FILES .SH FILES
@ -159,15 +159,15 @@ depending upon your platform:
\f[B]cheat\f[] will search in the order specified above. \f[B]cheat\f[] will search in the order specified above.
The first \f[I]conf.yaml\f[] encountered will be respected. The first \f[I]conf.yaml\f[] encountered will be respected.
.PP .PP
If \f[B]cheat\f[] cannot locate a config file, it will ask if you'd like If \f[B]cheat\f[] cannot locate a config file, it will ask if you\[aq]d
to generate one automatically. like to generate one automatically.
Alternatively, you may also generate a config file manually by running Alternatively, you may also generate a config file manually by running
\f[B]cheat \[en]init\f[] and saving its output to the appropriate \f[B]cheat \-\-init\f[] and saving its output to the appropriate
location for your platform. location for your platform.
.SS Cheatpaths .SS Cheatpaths
.PP .PP
\f[B]cheat\f[] reads its cheatsheets from \[lq]cheatpaths\[rq], which \f[B]cheat\f[] reads its cheatsheets from "cheatpaths", which are the
are the directories in which cheatsheets are stored. directories in which cheatsheets are stored.
Cheatpaths may be configured in \f[I]conf.yaml\f[], and viewed via Cheatpaths may be configured in \f[I]conf.yaml\f[], and viewed via
\f[B]cheat \-d\f[]. \f[B]cheat \-d\f[].
.PP .PP

5
go.mod
View File

@ -3,7 +3,7 @@ module github.com/cheat/cheat
go 1.14 go 1.14
require ( require (
github.com/alecthomas/chroma v0.9.1 github.com/alecthomas/chroma v0.8.0
github.com/davecgh/go-spew v1.1.1 github.com/davecgh/go-spew v1.1.1
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815
github.com/kr/text v0.2.0 // indirect github.com/kr/text v0.2.0 // indirect
@ -11,8 +11,7 @@ require (
github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/go-homedir v1.1.0
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
github.com/sergi/go-diff v1.1.0 // indirect github.com/sergi/go-diff v1.1.0 // indirect
golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 // indirect
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0 gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v2 v2.3.0
) )

22
go.sum
View File

@ -1,7 +1,7 @@
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
github.com/alecthomas/chroma v0.9.1 h1:cBmvQqRImzR5aWqdMxYZByND4S7BCS/g0svZb28h0Dc= github.com/alecthomas/chroma v0.8.0 h1:HS+HE97sgcqjQGu5uVr8jIE55Mmh5UeQ7kckAhHg2pY=
github.com/alecthomas/chroma v0.9.1/go.mod h1:eMuEnpA18XbG/WhOWtCzJHS7WqEtDAI+HxdwoW0nVSk= github.com/alecthomas/chroma v0.8.0/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE= github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
@ -13,12 +13,14 @@ github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
@ -29,23 +31,27 @@ github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 h1:dXfMednGJh/SUUFjTLsWJz3P+TQt9qnR11GgeI3vWKs= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
@ -54,5 +60,5 @@ gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0 h1:POO/ycCATvegFmVuPpQzZFJ+p
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg= gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View File

@ -1,22 +0,0 @@
package config
import (
"testing"
)
// TestColor asserts that colorization rules are properly respected
func TestColor(t *testing.T) {
// mock a config
conf := Config{}
opts := map[string]interface{}{"--colorize": false}
if conf.Color(opts) {
t.Errorf("failed to respect --colorize (false)")
}
opts = map[string]interface{}{"--colorize": true}
if !conf.Color(opts) {
t.Errorf("failed to respect --colorize (true)")
}
}

View File

@ -1,38 +0,0 @@
package config
import (
"io/ioutil"
"os"
"testing"
)
// TestInit asserts that configs are properly initialized
func TestInit(t *testing.T) {
// initialize a temporary config file
confFile, err := ioutil.TempFile("", "cheat-test")
if err != nil {
t.Errorf("failed to create temp file: %v", err)
}
// clean up the temp file
defer os.Remove(confFile.Name())
// initialize the config file
conf := "mock config data"
if err = Init(confFile.Name(), conf); err != nil {
t.Errorf("failed to init config file: %v", err)
}
// read back the config file contents
bytes, err := ioutil.ReadFile(confFile.Name())
if err != nil {
t.Errorf("failed to read config file: %v", err)
}
// assert that the contents were written correctly
got := string(bytes)
if got != conf {
t.Errorf("failed to write configs: want: %s, got: %s", conf, got)
}
}

View File

@ -1,53 +0,0 @@
package config
import (
"io/ioutil"
"os"
"testing"
)
// TestPathConfigNotExists asserts that `Path` identifies non-existent config
// files
func TestPathConfigNotExists(t *testing.T) {
// package (invalid) cheatpaths
paths := []string{"/cheat-test-conf-does-not-exist"}
// assert
if _, err := Path(paths); err == nil {
t.Errorf("failed to identify non-existent config file")
}
}
// TestPathConfigExists asserts that `Path` identifies existent config files
func TestPathConfigExists(t *testing.T) {
// initialize a temporary config file
confFile, err := ioutil.TempFile("", "cheat-test")
if err != nil {
t.Errorf("failed to create temp file: %v", err)
}
// clean up the temp file
defer os.Remove(confFile.Name())
// package cheatpaths
paths := []string{
"/cheat-test-conf-does-not-exist",
confFile.Name(),
}
// assert
got, err := Path(paths)
if err != nil {
t.Errorf("failed to identify config file: %v", err)
}
if got != confFile.Name() {
t.Errorf(
"failed to return config path: want: %s, got: %s",
confFile.Name(),
got,
)
}
}

View File

@ -9,9 +9,9 @@ import (
"github.com/cheat/cheat/internal/config" "github.com/cheat/cheat/internal/config"
) )
// Write writes output either directly to stdout, or through a pager, // Display writes output either directly to stdout, or through a pager,
// depending upon configuration. // depending upon configuration.
func Write(out string, conf config.Config) { func Display(out string, conf config.Config) {
// if no pager was configured, print the output to stdout and exit // if no pager was configured, print the output to stdout and exit
if conf.Pager == "" { if conf.Pager == "" {
fmt.Print(out) fmt.Print(out)

View File

@ -1,18 +0,0 @@
package display
import (
"fmt"
"github.com/cheat/cheat/internal/config"
)
// Faint returns an faint string
func Faint(str string, conf config.Config) string {
// make `str` faint only if colorization has been requested
if conf.Colorize {
return fmt.Sprintf(fmt.Sprintf("\033[2m%s\033[0m", str))
}
// otherwise, return the string unmodified
return str
}

View File

@ -1,27 +0,0 @@
package display
import (
"testing"
"github.com/cheat/cheat/internal/config"
)
// TestFaint asserts that Faint applies faint formatting
func TestFaint(t *testing.T) {
// case: apply colorization
conf := config.Config{Colorize: true}
want := "\033[2mfoo\033[0m"
got := Faint("foo", conf)
if want != got {
t.Errorf("failed to faint: want: %s, got: %s", want, got)
}
// case: do not apply colorization
conf.Colorize = false
want = "foo"
got = Faint("foo", conf)
if want != got {
t.Errorf("failed to faint: want: %s, got: %s", want, got)
}
}

View File

@ -1,21 +0,0 @@
package display
import (
"fmt"
"strings"
)
// Indent prepends each line of a string with a tab
func Indent(str string) string {
// trim superfluous whitespace
str = strings.TrimSpace(str)
// prepend each line with a tab character
out := ""
for _, line := range strings.Split(str, "\n") {
out += fmt.Sprintf("\t%s\n", line)
}
return out
}

View File

@ -1,12 +0,0 @@
package display
import "testing"
// TestIndent asserts that Indent prepends a tab to each line
func TestIndent(t *testing.T) {
got := Indent("foo\nbar\nbaz")
want := "\tfoo\n\tbar\n\tbaz\n"
if got != want {
t.Errorf("failed to indent: want: %s, got: %s", want, got)
}
}

View File

@ -1,8 +0,0 @@
package display
import "fmt"
// Underline returns an underlined string
func Underline(str string) string {
return fmt.Sprintf(fmt.Sprintf("\033[4m%s\033[0m", str))
}

View File

@ -1,14 +0,0 @@
package display
import (
"testing"
)
// TestUnderline asserts that Underline applies underline formatting
func TestUnderline(t *testing.T) {
want := "\033[4mfoo\033[0m"
got := Underline("foo")
if want != got {
t.Errorf("failed to underline: want: %s, got: %s", want, got)
}
}

View File

@ -8,8 +8,8 @@ import (
const cloneURL = "https://github.com/cheat/cheatsheets.git" const cloneURL = "https://github.com/cheat/cheatsheets.git"
// clone clones the community cheatsheets // Clone clones the community cheatsheets
func clone(path string) error { func Clone(path string) error {
// perform the clone in a shell // perform the clone in a shell
cmd := exec.Command("git", "clone", cloneURL, path) cmd := exec.Command("git", "clone", cloneURL, path)

View File

@ -1,55 +0,0 @@
package installer
import (
"fmt"
"os"
"path"
"strings"
"github.com/cheat/cheat/internal/config"
)
// Run runs the installer
func Run(configs string, confpath string) error {
// determine the appropriate paths for config data and (optional) community
// cheatsheets based on the user's platform
confdir := path.Dir(confpath)
// create paths for community and personal cheatsheets
community := path.Join(confdir, "/cheatsheets/community")
personal := path.Join(confdir, "/cheatsheets/personal")
// template the above paths into the default configs
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
// prompt the user to download the community cheatsheets
yes, err := Prompt(
"Would you like to download the community cheatsheets? [Y/n]",
true,
)
if err != nil {
return fmt.Errorf("failed to prompt: %v", err)
}
// clone the community cheatsheets if so instructed
if yes {
// clone the community cheatsheets
if err := clone(community); err != nil {
return fmt.Errorf("failed to clone cheatsheets: %v", err)
}
// also create a directory for personal cheatsheets
if err := os.MkdirAll(personal, os.ModePerm); err != nil {
return fmt.Errorf("failed to create directory: %v", err)
}
}
// the config file does not exist, so we'll try to create one
if err = config.Init(confpath, configs); err != nil {
return fmt.Errorf("failed to create config file: %v", err)
}
return nil
}

View File

@ -13,7 +13,7 @@ func Path(filename string) string {
// determine the path of this file during runtime // determine the path of this file during runtime
_, thisfile, _, _ := runtime.Caller(0) _, thisfile, _, _ := runtime.Caller(0)
// compute the mock path // compute the config path
file, err := filepath.Abs( file, err := filepath.Abs(
path.Join( path.Join(
filepath.Dir(thisfile), filepath.Dir(thisfile),
@ -22,7 +22,7 @@ func Path(filename string) string {
), ),
) )
if err != nil { if err != nil {
panic(fmt.Errorf("failed to resolve mock path: %v", err)) panic(fmt.Errorf("failed to resolve config path: %v", err))
} }
return file return file

View File

@ -1,34 +0,0 @@
package sheet
import (
"testing"
"github.com/cheat/cheat/internal/config"
)
// TestColorize asserts that syntax-highlighting is correctly applied
func TestColorize(t *testing.T) {
// mock configs
conf := config.Config{
Formatter: "terminal16m",
Style: "solarized-dark",
}
// mock a sheet
s := Sheet{
Text: "echo 'foo'",
}
// colorize the sheet text
s.Colorize(conf)
// initialize expectations
want := "echo"
want += " 'foo'"
// assert
if s.Text != want {
t.Errorf("failed to colorize sheet: want: %s, got: %s", want, s.Text)
}
}

View File

@ -25,7 +25,7 @@ func TestCopyFlat(t *testing.T) {
} }
// mock a cheatsheet struct // mock a cheatsheet struct
sheet, err := New("foo", "community", src.Name(), []string{}, false) sheet, err := New("foo", src.Name(), []string{}, false)
if err != nil { if err != nil {
t.Errorf("failed to init cheatsheet: %v", err) t.Errorf("failed to init cheatsheet: %v", err)
} }
@ -72,13 +72,7 @@ func TestCopyDeep(t *testing.T) {
} }
// mock a cheatsheet struct // mock a cheatsheet struct
sheet, err := New( sheet, err := New("/cheat-tests/alpha/bravo/foo", src.Name(), []string{}, false)
"/cheat-tests/alpha/bravo/foo",
"community",
src.Name(),
[]string{},
false,
)
if err != nil { if err != nil {
t.Errorf("failed to init cheatsheet: %v", err) t.Errorf("failed to init cheatsheet: %v", err)
} }

View File

@ -10,19 +10,17 @@ import (
// Sheet encapsulates sheet information // Sheet encapsulates sheet information
type Sheet struct { type Sheet struct {
Title string Title string
CheatPath string Path string
Path string Text string
Text string Tags []string
Tags []string Syntax string
Syntax string ReadOnly bool
ReadOnly bool
} }
// New initializes a new Sheet // New initializes a new Sheet
func New( func New(
title string, title string,
cheatpath string,
path string, path string,
tags []string, tags []string,
readOnly bool, readOnly bool,
@ -48,12 +46,11 @@ func New(
// initialize and return a sheet // initialize and return a sheet
return Sheet{ return Sheet{
Title: title, Title: title,
CheatPath: cheatpath, Path: path,
Path: path, Text: text + "\n",
Text: text + "\n", Tags: tags,
Tags: tags, Syntax: fm.Syntax,
Syntax: fm.Syntax, ReadOnly: readOnly,
ReadOnly: readOnly,
}, nil }, nil
} }

View File

@ -13,7 +13,6 @@ func TestSheetSuccess(t *testing.T) {
// initialize a sheet // initialize a sheet
sheet, err := New( sheet, err := New(
"foo", "foo",
"community",
mock.Path("sheet/foo"), mock.Path("sheet/foo"),
[]string{"alpha", "bravo"}, []string{"alpha", "bravo"},
false, false,
@ -62,7 +61,6 @@ func TestSheetFailure(t *testing.T) {
// initialize a sheet // initialize a sheet
_, err := New( _, err := New(
"foo", "foo",
"community",
mock.Path("/does-not-exist"), mock.Path("/does-not-exist"),
[]string{"alpha", "bravo"}, []string{"alpha", "bravo"},
false, false,
@ -71,20 +69,3 @@ func TestSheetFailure(t *testing.T) {
t.Errorf("failed to return an error on unreadable sheet") t.Errorf("failed to return an error on unreadable sheet")
} }
} }
// TestSheetFrontMatterFailure asserts that an error is returned if the sheet's
// frontmatter cannot be parsed.
func TestSheetFrontMatterFailure(t *testing.T) {
// initialize a sheet
_, err := New(
"foo",
"community",
mock.Path("sheet/bad-fm"),
[]string{"alpha", "bravo"},
false,
)
if err == nil {
t.Errorf("failed to return an error on malformed front-matter")
}
}

View File

@ -59,13 +59,7 @@ func Load(cheatpaths []cp.Cheatpath) ([]map[string]sheet.Sheet, error) {
} }
// parse the cheatsheet file into a `sheet` struct // parse the cheatsheet file into a `sheet` struct
s, err := sheet.New( s, err := sheet.New(title, path, cheatpath.Tags, cheatpath.ReadOnly)
title,
cheatpath.Name,
path,
cheatpath.Tags,
cheatpath.ReadOnly,
)
if err != nil { if err != nil {
return fmt.Errorf( return fmt.Errorf(
"failed to load sheet: %s, path: %s, err: %v", "failed to load sheet: %s, path: %s, err: %v",

View File

@ -1,62 +1,3 @@
package sheets package sheets
import ( // TODO
"path"
"testing"
"github.com/cheat/cheat/internal/cheatpath"
"github.com/cheat/cheat/internal/mock"
)
// TestLoad asserts that sheets on valid cheatpaths can be loaded successfully
func TestLoad(t *testing.T) {
// mock cheatpaths
cheatpaths := []cheatpath.Cheatpath{
{
Name: "community",
Path: path.Join(mock.Path("cheatsheets"), "community"),
ReadOnly: true,
},
{
Name: "personal",
Path: path.Join(mock.Path("cheatsheets"), "personal"),
ReadOnly: false,
},
}
// load cheatsheets
sheets, err := Load(cheatpaths)
if err != nil {
t.Errorf("failed to load cheatsheets: %v", err)
}
// assert that the correct number of sheets loaded
// (sheet load details are tested in `sheet_test.go`)
want := 4
if len(sheets) != want {
t.Errorf(
"failed to load correct number of cheatsheets: want: %d, got: %d",
want,
len(sheets),
)
}
}
// TestLoadBadPath asserts that an error is returned if a cheatpath is invalid
func TestLoadBadPath(t *testing.T) {
// mock a bad cheatpath
cheatpaths := []cheatpath.Cheatpath{
{
Name: "badpath",
Path: "/cheat/test/path/does/not/exist",
ReadOnly: true,
},
}
// attempt to load the cheatpath
if _, err := Load(cheatpaths); err == nil {
t.Errorf("failed to reject invalid cheatpath")
}
}

View File

@ -1,4 +0,0 @@
---
tags: [ community ]
---
This is the bar cheatsheet.

View File

@ -1,4 +0,0 @@
---
tags: [ community ]
---
This is the foo cheatsheet.

View File

@ -1,4 +0,0 @@
---
tags: [ personal ]
---
This is the bat cheatsheet.

View File

@ -1,4 +0,0 @@
---
tags: [ personal ]
---
This is the baz cheatsheet.

View File

@ -1,4 +0,0 @@
---
syntax: sh
This is malformed frontmatter.

View File

@ -6,8 +6,6 @@ release:
brews: brews:
- -
install: bin.install "chroma" install: bin.install "chroma"
env:
- CGO_ENABLED=0
builds: builds:
- goos: - goos:
- linux - linux

12
vendor/github.com/alecthomas/chroma/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,12 @@
sudo: false
language: go
go:
- "1.13.x"
script:
- go test -v ./...
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
- ./bin/golangci-lint run
- git clean -fdx .
after_success:
curl -sL https://git.io/goreleaser | bash && goreleaser

View File

@ -1,7 +1,5 @@
.PHONY: chromad upload all .PHONY: chromad upload all
VERSION ?= $(shell git describe --tags --dirty --always)
all: README.md tokentype_string.go all: README.md tokentype_string.go
README.md: lexers/*/*.go README.md: lexers/*/*.go
@ -11,8 +9,10 @@ tokentype_string.go: types.go
go generate go generate
chromad: chromad:
(cd ./cmd/chromad && go get github.com/GeertJohan/go.rice/rice@master && go install github.com/GeertJohan/go.rice/rice)
rm -f chromad rm -f chromad
(export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .) (export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -o ../../chromad .)
rice append -i ./cmd/chromad --exec=./chromad
upload: chromad upload: chromad
scp chromad root@swapoff.org: && \ scp chromad root@swapoff.org: && \

View File

@ -1,4 +1,4 @@
# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/chroma.svg)](https://circleci.com/gh/alecthomas/chroma) [![Go Report Card](https://goreportcard.com/badge/github.com/alecthomas/chroma)](https://goreportcard.com/report/github.com/alecthomas/chroma) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/) # Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![Build Status](https://travis-ci.org/alecthomas/chroma.svg)](https://travis-ci.org/alecthomas/chroma) [![Gitter chat](https://badges.gitter.im/alecthomas.svg)](https://gitter.im/alecthomas/Lobby)
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly. > **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
@ -36,30 +36,29 @@ translators for Pygments lexers and styles.
Prefix | Language Prefix | Language
:----: | -------- :----: | --------
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, BlitzBasic, BNF, Brainfuck B | Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, BNF, Brainfuck
C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython C | C, C#, C++, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan D | D, Dart, Diff, Django/Jinja, Docker, DTD
E | EBNF, Elixir, Elm, EmacsLisp, Erlang E | EBNF, Elixir, Elm, EmacsLisp, Erlang
F | Factor, Fish, Forth, Fortran, FSharp F | Factor, Fish, Forth, Fortran, FSharp
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy H | Handlebars, Haskell, Haxe, HCL, Hexdump, HTML, HTTP, Hy
I | Idris, Igor, INI, Io I | Idris, INI, Io
J | J, Java, JavaScript, JSON, Julia, Jungle J | J, Java, JavaScript, JSON, Julia, Jungle
K | Kotlin K | Kotlin
L | Lighttpd configuration file, LLVM, Lua L | Lighttpd configuration file, LLVM, Lua
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
N | NASM, Newspeak, Nginx configuration file, Nim, Nix N | NASM, Newspeak, Nginx configuration file, Nim, Nix
O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python, Python 3 P | PacmanConf, Perl, PHP, Pig, PkgConfig, PL/pgSQL, plaintext, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3
Q | QBasic Q | QBasic
R | R, Racket, Ragel, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust R | R, Racket, Ragel, react, reg, reStructuredText, Rexx, Ruby, Rust
S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Swift, SYSTEMD, systemverilog S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, SML, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, SYSTEMD, systemverilog
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
V | VB.net, verilog, VHDL, VimL, vue V | VB.net, verilog, VHDL, VimL, vue
W | WDTE W | WDTE
X | XML, Xorg X | XML, Xorg
Y | YAML, YANG Y | YAML
Z | Zig
_I will attempt to keep this section up to date, but an authoritative list can be _I will attempt to keep this section up to date, but an authoritative list can be
@ -184,7 +183,7 @@ following constructor options:
- `ClassPrefix(prefix)` - prefix each generated CSS class. - `ClassPrefix(prefix)` - prefix each generated CSS class.
- `TabWidth(width)` - Set the rendered tab width, in characters. - `TabWidth(width)` - Set the rendered tab width, in characters.
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). - `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves. - `LinkableLineNumbers()` - Make the line numbers linkable.
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). - `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. - `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
@ -216,7 +215,7 @@ python3 ~/Projects/chroma/_tools/pygments2chroma.py \
&& gofmt -s -w ~/Projects/chroma/lexers/*.go && gofmt -s -w ~/Projects/chroma/lexers/*.go
``` ```
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) See notes in [pygments-lexers.go](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
for a list of lexers, and notes on some of the issues importing them. for a list of lexers, and notes on some of the issues importing them.
<a id="markdown-formatters" name="formatters"></a> <a id="markdown-formatters" name="formatters"></a>

View File

@ -211,7 +211,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight)) fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
} }
fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line)) fmt.Fprintf(w, "<span%s%s>%*d\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), lineDigits, line)
if highlight { if highlight {
fmt.Fprintf(w, "</span>") fmt.Fprintf(w, "</span>")
@ -237,7 +237,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
} }
if f.lineNumbers && !wrapInTable { if f.lineNumbers && !wrapInTable {
fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line)) fmt.Fprintf(w, "<span%s%s>%*d</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), lineDigits, line)
} }
for _, token := range tokens { for _, token := range tokens {
@ -272,19 +272,7 @@ func (f *Formatter) lineIDAttribute(line int) string {
if !f.linkableLineNumbers { if !f.linkableLineNumbers {
return "" return ""
} }
return fmt.Sprintf(" id=\"%s\"", f.lineID(line)) return fmt.Sprintf(" id=\"%s%d\"", f.lineNumbersIDPrefix, line)
}
func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
title := fmt.Sprintf("%*d", lineDigits, line)
if !f.linkableLineNumbers {
return title
}
return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title)
}
func (f *Formatter) lineID(line int) string {
return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
} }
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) { func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {

View File

@ -17,20 +17,6 @@ var c = chroma.MustParseColour
var ttyTables = map[int]*ttyTable{ var ttyTables = map[int]*ttyTable{
8: { 8: {
foreground: map[chroma.Colour]string{
c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m",
c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m",
},
background: map[chroma.Colour]string{
c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m",
c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m",
},
},
16: {
foreground: map[chroma.Colour]string{ foreground: map[chroma.Colour]string{
c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m", c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m", c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
@ -241,11 +227,15 @@ type indexedTTYFormatter struct {
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) { func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
theme := styleToEscapeSequence(c.table, style) theme := styleToEscapeSequence(c.table, style)
for token := it(); token != chroma.EOF; token = it() { for token := it(); token != chroma.EOF; token = it() {
// TODO: Cache token lookups?
clr, ok := theme[token.Type] clr, ok := theme[token.Type]
if !ok { if !ok {
clr, ok = theme[token.Type.SubCategory()] clr, ok = theme[token.Type.SubCategory()]
if !ok { if !ok {
clr = theme[token.Type.Category()] clr = theme[token.Type.Category()]
// if !ok {
// clr = theme[chroma.InheritStyle]
// }
} }
} }
if clr != "" { if clr != "" {
@ -259,22 +249,10 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
return nil return nil
} }
// TTY is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
// TTY8 is an 8-colour terminal formatter. // TTY8 is an 8-colour terminal formatter.
// //
// The Lab colour space is used to map RGB values to the most appropriate index colour. // The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]}) var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
// TTY16 is a 16-colour terminal formatter.
//
// It uses \033[3xm for normal colours and \033[90Xm for bright colours.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]})
// TTY256 is a 256-colour terminal formatter. // TTY256 is a 256-colour terminal formatter.
// //

View File

@ -8,7 +8,7 @@ require (
github.com/alecthomas/kong v0.2.4 github.com/alecthomas/kong v0.2.4
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
github.com/dlclark/regexp2 v1.4.0 github.com/dlclark/regexp2 v1.2.0
github.com/mattn/go-colorable v0.1.6 github.com/mattn/go-colorable v0.1.6
github.com/mattn/go-isatty v0.0.12 github.com/mattn/go-isatty v0.0.12
github.com/pkg/errors v0.9.1 // indirect github.com/pkg/errors v0.9.1 // indirect

View File

@ -13,8 +13,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk= github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=

View File

@ -4,7 +4,7 @@ import "strings"
// An Iterator across tokens. // An Iterator across tokens.
// //
// EOF will be returned at the end of the Token stream. // nil will be returned at the end of the Token stream.
// //
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover. // If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
type Iterator func() Token type Iterator func() Token

View File

@ -6,7 +6,7 @@ import (
) )
// ABAP lexer. // ABAP lexer.
var Abap = internal.Register(MustNewLazyLexer( var Abap = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "ABAP", Name: "ABAP",
Aliases: []string{"abap"}, Aliases: []string{"abap"},
@ -14,11 +14,7 @@ var Abap = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-abap"}, MimeTypes: []string{"text/x-abap"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
abapRules, Rules{
))
func abapRules() Rules {
return Rules{
"common": { "common": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`^\*.*$`, CommentSingle, nil}, {`^\*.*$`, CommentSingle, nil},
@ -56,5 +52,5 @@ func abapRules() Rules {
{`[/;:()\[\],.]`, Punctuation, nil}, {`[/;:()\[\],.]`, Punctuation, nil},
{`(!)(\w+)`, ByGroups(Operator, Name), nil}, {`(!)(\w+)`, ByGroups(Operator, Name), nil},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Abnf lexer. // Abnf lexer.
var Abnf = internal.Register(MustNewLazyLexer( var Abnf = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "ABNF", Name: "ABNF",
Aliases: []string{"abnf"}, Aliases: []string{"abnf"},
Filenames: []string{"*.abnf"}, Filenames: []string{"*.abnf"},
MimeTypes: []string{"text/x-abnf"}, MimeTypes: []string{"text/x-abnf"},
}, },
abnfRules, Rules{
))
func abnfRules() Rules {
return Rules{
"root": { "root": {
{`;.*$`, CommentSingle, nil}, {`;.*$`, CommentSingle, nil},
{`(%[si])?"[^"]*"`, Literal, nil}, {`(%[si])?"[^"]*"`, Literal, nil},
@ -38,5 +34,5 @@ func abnfRules() Rules {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`.`, Text, nil}, {`.`, Text, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Actionscript lexer. // Actionscript lexer.
var Actionscript = internal.Register(MustNewLazyLexer( var Actionscript = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "ActionScript", Name: "ActionScript",
Aliases: []string{"as", "actionscript"}, Aliases: []string{"as", "actionscript"},
@ -15,11 +15,7 @@ var Actionscript = internal.Register(MustNewLazyLexer(
NotMultiline: true, NotMultiline: true,
DotAll: true, DotAll: true,
}, },
actionscriptRules, Rules{
))
func actionscriptRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`//.*?\n`, CommentSingle, nil}, {`//.*?\n`, CommentSingle, nil},
@ -39,5 +35,5 @@ func actionscriptRules() Rules {
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Actionscript 3 lexer. // Actionscript 3 lexer.
var Actionscript3 = internal.Register(MustNewLazyLexer( var Actionscript3 = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "ActionScript 3", Name: "ActionScript 3",
Aliases: []string{"as3", "actionscript3"}, Aliases: []string{"as3", "actionscript3"},
@ -14,11 +14,7 @@ var Actionscript3 = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"}, MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"},
DotAll: true, DotAll: true,
}, },
actionscript3Rules, Rules{
))
func actionscript3Rules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")}, {`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")},
@ -56,5 +52,5 @@ func actionscript3Rules() Rules {
{`,`, Operator, Pop(1)}, {`,`, Operator, Pop(1)},
Default(Pop(1)), Default(Pop(1)),
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Ada lexer. // Ada lexer.
var Ada = internal.Register(MustNewLazyLexer( var Ada = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Ada", Name: "Ada",
Aliases: []string{"ada", "ada95", "ada2005"}, Aliases: []string{"ada", "ada95", "ada2005"},
@ -14,11 +14,7 @@ var Ada = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-ada"}, MimeTypes: []string{"text/x-ada"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
adaRules, Rules{
))
func adaRules() Rules {
return Rules{
"root": { "root": {
{`[^\S\n]+`, Text, nil}, {`[^\S\n]+`, Text, nil},
{`--.*?\n`, CommentSingle, nil}, {`--.*?\n`, CommentSingle, nil},
@ -114,5 +110,5 @@ func adaRules() Rules {
{`\)`, Punctuation, Pop(1)}, {`\)`, Punctuation, Pop(1)},
Include("root"), Include("root"),
}, },
} },
} ))

View File

@ -1,48 +0,0 @@
package a
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Al lexer.
var Al = internal.Register(MustNewLazyLexer(
&Config{
Name: "AL",
Aliases: []string{"al"},
Filenames: []string{"*.al", "*.dal"},
MimeTypes: []string{"text/x-al"},
DotAll: true,
CaseInsensitive: true,
},
alRules,
))
// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage
func alRules() Rules {
return Rules{
"root": {
{`\s+`, TextWhitespace, nil},
{`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil},
{`(?s)//.*?\n`, CommentSingle, nil},
{`\"([^\"])*\"`, Text, nil},
{`'([^'])*'`, LiteralString, nil},
{`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil},
{`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil},
{`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil},
// Added new objects types of BC 2021 wave 1 (REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension)
{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension))\b`, Keyword, nil},
{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil},
{`\b([<>]=|<>|<|>)\b?`, Operator, nil},
{`\b(\-|\+|\/|\*)\b`, Operator, nil},
{`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil},
{`\b(?i:(ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
{`\s*[(\.\.)&\|]\s*`, Operator, nil},
{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil},
{`[;:,]`, Punctuation, nil},
{`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
{`\w+`, Text, nil},
{`.`, Text, nil},
},
}
}

View File

@ -6,18 +6,14 @@ import (
) )
// Angular2 lexer. // Angular2 lexer.
var Angular2 = internal.Register(MustNewLazyLexer( var Angular2 = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Angular2", Name: "Angular2",
Aliases: []string{"ng2"}, Aliases: []string{"ng2"},
Filenames: []string{}, Filenames: []string{},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
angular2Rules, Rules{
))
func angular2Rules() Rules {
return Rules{
"root": { "root": {
{`[^{([*#]+`, Other, nil}, {`[^{([*#]+`, Other, nil},
{`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")}, {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")},
@ -42,5 +38,5 @@ func angular2Rules() Rules {
{`'.*?'`, LiteralString, Pop(1)}, {`'.*?'`, LiteralString, Pop(1)},
{`[^\s>]+`, LiteralString, Pop(1)}, {`[^\s>]+`, LiteralString, Pop(1)},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// ANTLR lexer. // ANTLR lexer.
var ANTLR = internal.Register(MustNewLazyLexer( var ANTLR = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "ANTLR", Name: "ANTLR",
Aliases: []string{"antlr"}, Aliases: []string{"antlr"},
Filenames: []string{}, Filenames: []string{},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
antlrRules, Rules{
))
func antlrRules() Rules {
return Rules{
"whitespace": { "whitespace": {
{`\s+`, TextWhitespace, nil}, {`\s+`, TextWhitespace, nil},
}, },
@ -101,5 +97,5 @@ func antlrRules() Rules {
{`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, {`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil},
{`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil}, {`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Apacheconf lexer. // Apacheconf lexer.
var Apacheconf = internal.Register(MustNewLazyLexer( var Apacheconf = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "ApacheConf", Name: "ApacheConf",
Aliases: []string{"apacheconf", "aconf", "apache"}, Aliases: []string{"apacheconf", "aconf", "apache"},
@ -14,11 +14,7 @@ var Apacheconf = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-apacheconf"}, MimeTypes: []string{"text/x-apacheconf"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
apacheconfRules, Rules{
))
func apacheconfRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`(#.*?)$`, Comment, nil}, {`(#.*?)$`, Comment, nil},
@ -38,5 +34,5 @@ func apacheconfRules() Rules {
{`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil},
{`[^\s"\\]+`, Text, nil}, {`[^\s"\\]+`, Text, nil},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Apl lexer. // Apl lexer.
var Apl = internal.Register(MustNewLazyLexer( var Apl = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "APL", Name: "APL",
Aliases: []string{"apl"}, Aliases: []string{"apl"},
Filenames: []string{"*.apl"}, Filenames: []string{"*.apl"},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
aplRules, Rules{
))
func aplRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`[⍝#].*$`, CommentSingle, nil}, {`[⍝#].*$`, CommentSingle, nil},
@ -36,5 +32,5 @@ func aplRules() Rules {
{`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil}, {`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil},
{`[{}]`, KeywordType, nil}, {`[{}]`, KeywordType, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Applescript lexer. // Applescript lexer.
var Applescript = internal.Register(MustNewLazyLexer( var Applescript = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "AppleScript", Name: "AppleScript",
Aliases: []string{"applescript"}, Aliases: []string{"applescript"},
@ -14,11 +14,7 @@ var Applescript = internal.Register(MustNewLazyLexer(
MimeTypes: []string{}, MimeTypes: []string{},
DotAll: true, DotAll: true,
}, },
applescriptRules, Rules{
))
func applescriptRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`¬\n`, LiteralStringEscape, nil}, {`¬\n`, LiteralStringEscape, nil},
@ -55,5 +51,5 @@ func applescriptRules() Rules {
{`[^*(]+`, CommentMultiline, nil}, {`[^*(]+`, CommentMultiline, nil},
{`[*(]`, CommentMultiline, nil}, {`[*(]`, CommentMultiline, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Arduino lexer. // Arduino lexer.
var Arduino = internal.Register(MustNewLazyLexer( var Arduino = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Arduino", Name: "Arduino",
Aliases: []string{"arduino"}, Aliases: []string{"arduino"},
@ -14,11 +14,7 @@ var Arduino = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-arduino"}, MimeTypes: []string{"text/x-arduino"},
EnsureNL: true, EnsureNL: true,
}, },
arduinoRules, Rules{
))
func arduinoRules() Rules {
return Rules{
"statements": { "statements": {
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil}, {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil},
{`char(16_t|32_t)\b`, KeywordType, nil}, {`char(16_t|32_t)\b`, KeywordType, nil},
@ -110,5 +106,5 @@ func arduinoRules() Rules {
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)}, {`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil}, {`.*?\n`, Comment, nil},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Awk lexer. // Awk lexer.
var Awk = internal.Register(MustNewLazyLexer( var Awk = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Awk", Name: "Awk",
Aliases: []string{"awk", "gawk", "mawk", "nawk"}, Aliases: []string{"awk", "gawk", "mawk", "nawk"},
Filenames: []string{"*.awk"}, Filenames: []string{"*.awk"},
MimeTypes: []string{"application/x-awk"}, MimeTypes: []string{"application/x-awk"},
}, },
awkRules, Rules{
))
func awkRules() Rules {
return Rules{
"commentsandwhitespace": { "commentsandwhitespace": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`#.*$`, CommentSingle, nil}, {`#.*$`, CommentSingle, nil},
@ -34,19 +30,19 @@ func awkRules() Rules {
"root": { "root": {
{`^(?=\s|/)`, Text, Push("slashstartsregex")}, {`^(?=\s|/)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"), Include("commentsandwhitespace"),
{`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, {`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, {`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil}, {`[})\].]`, Punctuation, nil},
{`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")}, {`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")},
{`function\b`, KeywordDeclaration, Push("slashstartsregex")}, {`function\b`, KeywordDeclaration, Push("slashstartsregex")},
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil}, {`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil},
{`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil}, {`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil},
{`[@$a-zA-Z_]\w*`, NameOther, nil}, {`[$a-zA-Z_]\w*`, NameOther, nil},
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`[0-9]+`, LiteralNumberInteger, nil}, {`[0-9]+`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Ballerina lexer. // Ballerina lexer.
var Ballerina = internal.Register(MustNewLazyLexer( var Ballerina = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Ballerina", Name: "Ballerina",
Aliases: []string{"ballerina"}, Aliases: []string{"ballerina"},
@ -14,11 +14,7 @@ var Ballerina = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-ballerina"}, MimeTypes: []string{"text/x-ballerina"},
DotAll: true, DotAll: true,
}, },
ballerinaRules, Rules{
))
func ballerinaRules() Rules {
return Rules{
"root": { "root": {
{`[^\S\n]+`, Text, nil}, {`[^\S\n]+`, Text, nil},
{`//.*?\n`, CommentSingle, nil}, {`//.*?\n`, CommentSingle, nil},
@ -46,5 +42,5 @@ func ballerinaRules() Rules {
"import": { "import": {
{`[\w.]+`, NameNamespace, Pop(1)}, {`[\w.]+`, NameNamespace, Pop(1)},
}, },
} },
} ))

View File

@ -7,27 +7,17 @@ import (
"github.com/alecthomas/chroma/lexers/internal" "github.com/alecthomas/chroma/lexers/internal"
) )
// TODO(moorereason): can this be factored away?
var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`) var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)
// Bash lexer. // Bash lexer.
var Bash = internal.Register(MustNewLazyLexer( var Bash = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Bash", Name: "Bash",
Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"}, Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"},
Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"}, Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
MimeTypes: []string{"application/x-sh", "application/x-shellscript"}, MimeTypes: []string{"application/x-sh", "application/x-shellscript"},
}, },
bashRules, Rules{
).SetAnalyser(func(text string) float32 {
if bashAnalyserRe.FindString(text) != "" {
return 1.0
}
return 0.0
}))
func bashRules() Rules {
return Rules{
"root": { "root": {
Include("basic"), Include("basic"),
{"`", LiteralStringBacktick, Push("backticks")}, {"`", LiteralStringBacktick, Push("backticks")},
@ -96,5 +86,10 @@ func bashRules() Rules {
{"`", LiteralStringBacktick, Pop(1)}, {"`", LiteralStringBacktick, Pop(1)},
Include("root"), Include("root"),
}, },
},
).SetAnalyser(func(text string) float32 {
if bashAnalyserRe.FindString(text) != "" {
return 1.0
} }
} return 0.0
}))

View File

@ -6,7 +6,7 @@ import (
) )
// Batchfile lexer. // Batchfile lexer.
var Batchfile = internal.Register(MustNewLazyLexer( var Batchfile = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Batchfile", Name: "Batchfile",
Aliases: []string{"bat", "batch", "dosbatch", "winbatch"}, Aliases: []string{"bat", "batch", "dosbatch", "winbatch"},
@ -14,11 +14,7 @@ var Batchfile = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"application/x-dos-batch"}, MimeTypes: []string{"application/x-dos-batch"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
batchfileRules, Rules{
))
func batchfileRules() Rules {
return Rules{
"root": { "root": {
{`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil}, {`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil},
{`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")}, {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")},
@ -194,5 +190,5 @@ func batchfileRules() Rules {
{`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)}, {`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)},
Default(Pop(1)), Default(Pop(1)),
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Bibtex lexer. // Bibtex lexer.
var Bibtex = internal.Register(MustNewLazyLexer( var Bibtex = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "BibTeX", Name: "BibTeX",
Aliases: []string{"bib", "bibtex"}, Aliases: []string{"bib", "bibtex"},
@ -15,11 +15,7 @@ var Bibtex = internal.Register(MustNewLazyLexer(
NotMultiline: true, NotMultiline: true,
CaseInsensitive: true, CaseInsensitive: true,
}, },
bibtexRules, Rules{
))
func bibtexRules() Rules {
return Rules{
"root": { "root": {
Include("whitespace"), Include("whitespace"),
{`@comment`, Comment, nil}, {`@comment`, Comment, nil},
@ -76,5 +72,5 @@ func bibtexRules() Rules {
"whitespace": { "whitespace": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Blitzbasic lexer. // Blitzbasic lexer.
var Blitzbasic = internal.Register(MustNewLazyLexer( var Blitzbasic = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "BlitzBasic", Name: "BlitzBasic",
Aliases: []string{"blitzbasic", "b3d", "bplus"}, Aliases: []string{"blitzbasic", "b3d", "bplus"},
@ -14,11 +14,7 @@ var Blitzbasic = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-bb"}, MimeTypes: []string{"text/x-bb"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
blitzbasicRules, Rules{
))
func blitzbasicRules() Rules {
return Rules{
"root": { "root": {
{`[ \t]+`, Text, nil}, {`[ \t]+`, Text, nil},
{`;.*?\n`, CommentSingle, nil}, {`;.*?\n`, CommentSingle, nil},
@ -48,5 +44,5 @@ func blitzbasicRules() Rules {
{`"C?`, LiteralStringDouble, Pop(1)}, {`"C?`, LiteralStringDouble, Pop(1)},
{`[^"]+`, LiteralStringDouble, nil}, {`[^"]+`, LiteralStringDouble, nil},
}, },
} },
} ))

View File

@ -6,23 +6,19 @@ import (
) )
// Bnf lexer. // Bnf lexer.
var Bnf = internal.Register(MustNewLazyLexer( var Bnf = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "BNF", Name: "BNF",
Aliases: []string{"bnf"}, Aliases: []string{"bnf"},
Filenames: []string{"*.bnf"}, Filenames: []string{"*.bnf"},
MimeTypes: []string{"text/x-bnf"}, MimeTypes: []string{"text/x-bnf"},
}, },
bnfRules, Rules{
))
func bnfRules() Rules {
return Rules{
"root": { "root": {
{`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil}, {`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil},
{`::=`, Operator, nil}, {`::=`, Operator, nil},
{`[^<>:]+`, Text, nil}, {`[^<>:]+`, Text, nil},
{`.`, Text, nil}, {`.`, Text, nil},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Brainfuck lexer. // Brainfuck lexer.
var Brainfuck = internal.Register(MustNewLazyLexer( var Brainfuck = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Brainfuck", Name: "Brainfuck",
Aliases: []string{"brainfuck", "bf"}, Aliases: []string{"brainfuck", "bf"},
Filenames: []string{"*.bf", "*.b"}, Filenames: []string{"*.bf", "*.b"},
MimeTypes: []string{"application/x-brainfuck"}, MimeTypes: []string{"application/x-brainfuck"},
}, },
brainfuckRules, Rules{
))
func brainfuckRules() Rules {
return Rules{
"common": { "common": {
{`[.,]+`, NameTag, nil}, {`[.,]+`, NameTag, nil},
{`[+-]+`, NameBuiltin, nil}, {`[+-]+`, NameBuiltin, nil},
@ -34,5 +30,5 @@ func brainfuckRules() Rules {
{`\]`, Keyword, Pop(1)}, {`\]`, Keyword, Pop(1)},
Include("common"), Include("common"),
}, },
} },
} ))

View File

@ -6,19 +6,14 @@ import (
) )
// C lexer. // C lexer.
var C = internal.Register(MustNewLazyLexer( var C = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "C", Name: "C",
Aliases: []string{"c"}, Aliases: []string{"c"},
Filenames: []string{"*.c", "*.h", "*.idc"}, Filenames: []string{"*.c", "*.h", "*.idc"},
MimeTypes: []string{"text/x-chdr", "text/x-csrc"}, MimeTypes: []string{"text/x-chdr", "text/x-csrc"},
EnsureNL: true,
}, },
cRules, Rules{
))
func cRules() Rules {
return Rules{
"whitespace": { "whitespace": {
{`^#if\s+0`, CommentPreproc, Push("if0")}, {`^#if\s+0`, CommentPreproc, Push("if0")},
{`^#`, CommentPreproc, Push("macro")}, {`^#`, CommentPreproc, Push("macro")},
@ -92,5 +87,5 @@ func cRules() Rules {
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)}, {`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil}, {`.*?\n`, Comment, nil},
}, },
} },
} ))

View File

@ -6,149 +6,143 @@ import (
) )
// caddyfileCommon are the rules common to both of the lexer variants // caddyfileCommon are the rules common to both of the lexer variants
func caddyfileCommonRules() Rules { var caddyfileCommon = Rules{
return Rules{ "site_block_common": {
"site_block_common": { // Import keyword
// Import keyword {`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil}, // Matcher definition
// Matcher definition {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, // Matcher token stub for docs
// Matcher token stub for docs {`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")}, // These cannot have matchers but may have things that look like
// These cannot have matchers but may have things that look like // matchers in their arguments, so we just parse as a subdirective.
// matchers in their arguments, so we just parse as a subdirective. {`try_files`, Keyword, Push("subdirective")},
{`try_files`, Keyword, Push("subdirective")}, // These are special, they can nest more directives
// These are special, they can nest more directives {`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")}, // Any other directive
// Any other directive {`[^\s#]+`, Keyword, Push("directive")},
{`[^\s#]+`, Keyword, Push("directive")}, Include("base"),
Include("base"), },
}, "matcher": {
"matcher": { {`\{`, Punctuation, Push("block")},
{`\{`, Punctuation, Push("block")}, // Not can be one-liner
// Not can be one-liner {`not`, Keyword, Push("deep_not_matcher")},
{`not`, Keyword, Push("deep_not_matcher")}, // Any other same-line matcher
// Any other same-line matcher {`[^\s#]+`, Keyword, Push("arguments")},
{`[^\s#]+`, Keyword, Push("arguments")}, // Terminators
// Terminators {`\n`, Text, Pop(1)},
{`\n`, Text, Pop(1)}, {`\}`, Punctuation, Pop(1)},
{`\}`, Punctuation, Pop(1)}, Include("base"),
Include("base"), },
}, "block": {
"block": { {`\}`, Punctuation, Pop(2)},
{`\}`, Punctuation, Pop(2)}, // Not can be one-liner
// Not can be one-liner {`not`, Keyword, Push("not_matcher")},
{`not`, Keyword, Push("not_matcher")}, // Any other subdirective
// Any other subdirective {`[^\s#]+`, Keyword, Push("subdirective")},
{`[^\s#]+`, Keyword, Push("subdirective")}, Include("base"),
Include("base"), },
}, "nested_block": {
"nested_block": { {`\}`, Punctuation, Pop(2)},
{`\}`, Punctuation, Pop(2)}, // Matcher definition
// Matcher definition {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, // Something that starts with literally < is probably a docs stub
// Something that starts with literally < is probably a docs stub {`\<[^#]+\>`, Keyword, Push("nested_directive")},
{`\<[^#]+\>`, Keyword, Push("nested_directive")}, // Any other directive
// Any other directive {`[^\s#]+`, Keyword, Push("nested_directive")},
{`[^\s#]+`, Keyword, Push("nested_directive")}, Include("base"),
Include("base"), },
}, "not_matcher": {
"not_matcher": { {`\}`, Punctuation, Pop(2)},
{`\}`, Punctuation, Pop(2)}, {`\{(?=\s)`, Punctuation, Push("block")},
{`\{(?=\s)`, Punctuation, Push("block")}, {`[^\s#]+`, Keyword, Push("arguments")},
{`[^\s#]+`, Keyword, Push("arguments")}, {`\s+`, Text, nil},
{`\s+`, Text, nil}, },
}, "deep_not_matcher": {
"deep_not_matcher": { {`\}`, Punctuation, Pop(2)},
{`\}`, Punctuation, Pop(2)}, {`\{(?=\s)`, Punctuation, Push("block")},
{`\{(?=\s)`, Punctuation, Push("block")}, {`[^\s#]+`, Keyword, Push("deep_subdirective")},
{`[^\s#]+`, Keyword, Push("deep_subdirective")}, {`\s+`, Text, nil},
{`\s+`, Text, nil}, },
}, "directive": {
"directive": { {`\{(?=\s)`, Punctuation, Push("block")},
{`\{(?=\s)`, Punctuation, Push("block")}, Include("matcher_token"),
Include("matcher_token"), Include("comments_pop_1"),
Include("comments_pop_1"), {`\n`, Text, Pop(1)},
{`\n`, Text, Pop(1)}, Include("base"),
Include("base"), },
}, "nested_directive": {
"nested_directive": { {`\{(?=\s)`, Punctuation, Push("nested_block")},
{`\{(?=\s)`, Punctuation, Push("nested_block")}, Include("matcher_token"),
Include("matcher_token"), Include("comments_pop_1"),
Include("comments_pop_1"), {`\n`, Text, Pop(1)},
{`\n`, Text, Pop(1)}, Include("base"),
Include("base"), },
}, "subdirective": {
"subdirective": { {`\{(?=\s)`, Punctuation, Push("block")},
{`\{(?=\s)`, Punctuation, Push("block")}, Include("comments_pop_1"),
Include("comments_pop_1"), {`\n`, Text, Pop(1)},
{`\n`, Text, Pop(1)}, Include("base"),
Include("base"), },
}, "arguments": {
"arguments": { {`\{(?=\s)`, Punctuation, Push("block")},
{`\{(?=\s)`, Punctuation, Push("block")}, Include("comments_pop_2"),
Include("comments_pop_2"), {`\\\n`, Text, nil}, // Skip escaped newlines
{`\\\n`, Text, nil}, // Skip escaped newlines {`\n`, Text, Pop(2)},
{`\n`, Text, Pop(2)}, Include("base"),
Include("base"), },
}, "deep_subdirective": {
"deep_subdirective": { {`\{(?=\s)`, Punctuation, Push("block")},
{`\{(?=\s)`, Punctuation, Push("block")}, Include("comments_pop_3"),
Include("comments_pop_3"), {`\n`, Text, Pop(3)},
{`\n`, Text, Pop(3)}, Include("base"),
Include("base"), },
}, "matcher_token": {
"matcher_token": { {`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher {`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher {`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher {`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs },
}, "comments": {
"comments": { {`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line {`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace },
}, "comments_pop_1": {
"comments_pop_1": { {`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line {`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace },
}, "comments_pop_2": {
"comments_pop_2": { {`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line {`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace },
}, "comments_pop_3": {
"comments_pop_3": { {`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line {`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace },
}, "base": {
"base": { Include("comments"),
Include("comments"), {`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil}, {`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil}, {`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil}, {`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, {`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder {`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\[(?=[^#{}$]+\])`, Punctuation, nil}, {`\]|\|`, Punctuation, nil},
{`\]|\|`, Punctuation, nil}, {`[^\s#{}$\]]+`, LiteralString, nil},
{`[^\s#{}$\]]+`, LiteralString, nil}, {`/[^\s#]*`, Name, nil},
{`/[^\s#]*`, Name, nil}, {`\s+`, Text, nil},
{`\s+`, Text, nil}, },
},
}
} }
// Caddyfile lexer. // Caddyfile lexer.
var Caddyfile = internal.Register(MustNewLazyLexer( var Caddyfile = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Caddyfile", Name: "Caddyfile",
Aliases: []string{"caddyfile", "caddy"}, Aliases: []string{"caddyfile", "caddy"},
Filenames: []string{"Caddyfile*"}, Filenames: []string{"Caddyfile*"},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
caddyfileRules, Rules{
))
func caddyfileRules() Rules {
return Rules{
"root": { "root": {
Include("comments"), Include("comments"),
// Global options block // Global options block
@ -192,25 +186,21 @@ func caddyfileRules() Rules {
{`\}`, Punctuation, Pop(2)}, {`\}`, Punctuation, Pop(2)},
Include("site_block_common"), Include("site_block_common"),
}, },
}.Merge(caddyfileCommonRules()) }.Merge(caddyfileCommon),
} ))
// Caddyfile directive-only lexer. // Caddyfile directive-only lexer.
var CaddyfileDirectives = internal.Register(MustNewLazyLexer( var CaddyfileDirectives = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Caddyfile Directives", Name: "Caddyfile Directives",
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"}, Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
Filenames: []string{}, Filenames: []string{},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
caddyfileDirectivesRules, Rules{
))
func caddyfileDirectivesRules() Rules {
return Rules{
// Same as "site_block" in Caddyfile // Same as "site_block" in Caddyfile
"root": { "root": {
Include("site_block_common"), Include("site_block_common"),
}, },
}.Merge(caddyfileCommonRules()) }.Merge(caddyfileCommon),
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Cap'N'Proto Proto lexer. // Cap'N'Proto Proto lexer.
var CapNProto = internal.Register(MustNewLazyLexer( var CapNProto = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Cap'n Proto", Name: "Cap'n Proto",
Aliases: []string{"capnp"}, Aliases: []string{"capnp"},
Filenames: []string{"*.capnp"}, Filenames: []string{"*.capnp"},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
capNProtoRules, Rules{
))
func capNProtoRules() Rules {
return Rules{
"root": { "root": {
{`#.*?$`, CommentSingle, nil}, {`#.*?$`, CommentSingle, nil},
{`@[0-9a-zA-Z]*`, NameDecorator, nil}, {`@[0-9a-zA-Z]*`, NameDecorator, nil},
@ -61,5 +57,5 @@ func capNProtoRules() Rules {
{`[])]`, NameAttribute, Pop(1)}, {`[])]`, NameAttribute, Pop(1)},
Default(Pop(1)), Default(Pop(1)),
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Ceylon lexer. // Ceylon lexer.
var Ceylon = internal.Register(MustNewLazyLexer( var Ceylon = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Ceylon", Name: "Ceylon",
Aliases: []string{"ceylon"}, Aliases: []string{"ceylon"},
@ -14,11 +14,7 @@ var Ceylon = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-ceylon"}, MimeTypes: []string{"text/x-ceylon"},
DotAll: true, DotAll: true,
}, },
ceylonRules, Rules{
))
func ceylonRules() Rules {
return Rules{
"root": { "root": {
{`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, {`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
{`[^\S\n]+`, Text, nil}, {`[^\S\n]+`, Text, nil},
@ -63,5 +59,5 @@ func ceylonRules() Rules {
{`\*/`, CommentMultiline, Pop(1)}, {`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil}, {`[*/]`, CommentMultiline, nil},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Cfengine3 lexer. // Cfengine3 lexer.
var Cfengine3 = internal.Register(MustNewLazyLexer( var Cfengine3 = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "CFEngine3", Name: "CFEngine3",
Aliases: []string{"cfengine3", "cf3"}, Aliases: []string{"cfengine3", "cf3"},
Filenames: []string{"*.cf"}, Filenames: []string{"*.cf"},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
cfengine3Rules, Rules{
))
func cfengine3Rules() Rules {
return Rules{
"root": { "root": {
{`#.*?\n`, Comment, nil}, {`#.*?\n`, Comment, nil},
{`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil}, {`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil},
@ -56,5 +52,5 @@ func cfengine3Rules() Rules {
{`\w+`, NameVariable, nil}, {`\w+`, NameVariable, nil},
{`\s+`, Text, nil}, {`\s+`, Text, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Chaiscript lexer. // Chaiscript lexer.
var Chaiscript = internal.Register(MustNewLazyLexer( var Chaiscript = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "ChaiScript", Name: "ChaiScript",
Aliases: []string{"chai", "chaiscript"}, Aliases: []string{"chai", "chaiscript"},
@ -14,11 +14,7 @@ var Chaiscript = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"}, MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"},
DotAll: true, DotAll: true,
}, },
chaiscriptRules, Rules{
))
func chaiscriptRules() Rules {
return Rules{
"commentsandwhitespace": { "commentsandwhitespace": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`//.*?\n`, CommentSingle, nil}, {`//.*?\n`, CommentSingle, nil},
@ -63,5 +59,5 @@ func chaiscriptRules() Rules {
{`[^\\"$]+`, LiteralStringDouble, nil}, {`[^\\"$]+`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)}, {`"`, LiteralStringDouble, Pop(1)},
}, },
} },
} ))

View File

@ -7,18 +7,14 @@ import (
) )
// Cheetah lexer. // Cheetah lexer.
var Cheetah = internal.Register(MustNewLazyLexer( var Cheetah = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Cheetah", Name: "Cheetah",
Aliases: []string{"cheetah", "spitfire"}, Aliases: []string{"cheetah", "spitfire"},
Filenames: []string{"*.tmpl", "*.spt"}, Filenames: []string{"*.tmpl", "*.spt"},
MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"}, MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"},
}, },
cheetahRules, Rules{
))
func cheetahRules() Rules {
return Rules{
"root": { "root": {
{`(##[^\n]*)$`, ByGroups(Comment), nil}, {`(##[^\n]*)$`, ByGroups(Comment), nil},
{`#[*](.|\n)*?[*]#`, Comment, nil}, {`#[*](.|\n)*?[*]#`, Comment, nil},
@ -37,5 +33,5 @@ func cheetahRules() Rules {
`, Other, nil}, `, Other, nil},
{`\s+`, Text, nil}, {`\s+`, Text, nil},
}, },
} },
} ))

View File

@ -230,7 +230,7 @@ var (
) )
// Common Lisp lexer. // Common Lisp lexer.
var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer( var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
&Config{ &Config{
Name: "Common Lisp", Name: "Common Lisp",
Aliases: []string{"common-lisp", "cl", "lisp"}, Aliases: []string{"common-lisp", "cl", "lisp"},
@ -238,19 +238,7 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
MimeTypes: []string{"text/x-common-lisp"}, MimeTypes: []string{"text/x-common-lisp"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
commonLispRules, Rules{
), TypeMapping{
{NameVariable, NameFunction, clBuiltinFunctions},
{NameVariable, Keyword, clSpecialForms},
{NameVariable, NameBuiltin, clMacros},
{NameVariable, Keyword, clLambdaListKeywords},
{NameVariable, Keyword, clDeclarations},
{NameVariable, KeywordType, clBuiltinTypes},
{NameVariable, NameClass, clBuiltinClasses},
}))
func commonLispRules() Rules {
return Rules{
"root": { "root": {
Default(Push("body")), Default(Push("body")),
}, },
@ -306,5 +294,13 @@ func commonLispRules() Rules {
{`\(`, Punctuation, Push("body")}, {`\(`, Punctuation, Push("body")},
{`\)`, Punctuation, Pop(1)}, {`\)`, Punctuation, Pop(1)},
}, },
} },
} ), TypeMapping{
{NameVariable, NameFunction, clBuiltinFunctions},
{NameVariable, Keyword, clSpecialForms},
{NameVariable, NameBuiltin, clMacros},
{NameVariable, Keyword, clLambdaListKeywords},
{NameVariable, Keyword, clDeclarations},
{NameVariable, KeywordType, clBuiltinTypes},
{NameVariable, NameClass, clBuiltinClasses},
}))

View File

@ -6,18 +6,14 @@ import (
) )
// Clojure lexer. // Clojure lexer.
var Clojure = internal.Register(MustNewLazyLexer( var Clojure = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Clojure", Name: "Clojure",
Aliases: []string{"clojure", "clj"}, Aliases: []string{"clojure", "clj"},
Filenames: []string{"*.clj"}, Filenames: []string{"*.clj"},
MimeTypes: []string{"text/x-clojure", "application/x-clojure"}, MimeTypes: []string{"text/x-clojure", "application/x-clojure"},
}, },
clojureRules, Rules{
))
func clojureRules() Rules {
return Rules{
"root": { "root": {
{`;.*$`, CommentSingle, nil}, {`;.*$`, CommentSingle, nil},
{`[,\s]+`, Text, nil}, {`[,\s]+`, Text, nil},
@ -38,5 +34,5 @@ func clojureRules() Rules {
{`(\{|\})`, Punctuation, nil}, {`(\{|\})`, Punctuation, nil},
{`(\(|\))`, Punctuation, nil}, {`(\(|\))`, Punctuation, nil},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Cmake lexer. // Cmake lexer.
var Cmake = internal.Register(MustNewLazyLexer( var Cmake = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "CMake", Name: "CMake",
Aliases: []string{"cmake"}, Aliases: []string{"cmake"},
Filenames: []string{"*.cmake", "CMakeLists.txt"}, Filenames: []string{"*.cmake", "CMakeLists.txt"},
MimeTypes: []string{"text/x-cmake"}, MimeTypes: []string{"text/x-cmake"},
}, },
cmakeRules, Rules{
))
func cmakeRules() Rules {
return Rules{
"root": { "root": {
{`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")}, {`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")},
Include("keywords"), Include("keywords"),
@ -44,5 +40,5 @@ func cmakeRules() Rules {
{`[ \t]+`, Text, nil}, {`[ \t]+`, Text, nil},
{`#.*\n`, Comment, nil}, {`#.*\n`, Comment, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Cobol lexer. // Cobol lexer.
var Cobol = internal.Register(MustNewLazyLexer( var Cobol = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "COBOL", Name: "COBOL",
Aliases: []string{"cobol"}, Aliases: []string{"cobol"},
@ -14,11 +14,7 @@ var Cobol = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-cobol"}, MimeTypes: []string{"text/x-cobol"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
cobolRules, Rules{
))
func cobolRules() Rules {
return Rules{
"root": { "root": {
Include("comment"), Include("comment"),
Include("strings"), Include("strings"),
@ -51,5 +47,5 @@ func cobolRules() Rules {
{`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil}, {`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil},
{`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil}, {`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Coffeescript lexer. // Coffeescript lexer.
var Coffeescript = internal.Register(MustNewLazyLexer( var Coffeescript = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "CoffeeScript", Name: "CoffeeScript",
Aliases: []string{"coffee-script", "coffeescript", "coffee"}, Aliases: []string{"coffee-script", "coffeescript", "coffee"},
@ -15,11 +15,7 @@ var Coffeescript = internal.Register(MustNewLazyLexer(
NotMultiline: true, NotMultiline: true,
DotAll: true, DotAll: true,
}, },
coffeescriptRules, Rules{
))
func coffeescriptRules() Rules {
return Rules{
"commentsandwhitespace": { "commentsandwhitespace": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`###[^#].*?###`, CommentMultiline, nil}, {`###[^#].*?###`, CommentMultiline, nil},
@ -91,5 +87,5 @@ func coffeescriptRules() Rules {
{`#|\\.|\'|"`, LiteralString, nil}, {`#|\\.|\'|"`, LiteralString, nil},
Include("strings"), Include("strings"),
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Cfstatement lexer. // Cfstatement lexer.
var Cfstatement = internal.Register(MustNewLazyLexer( var Cfstatement = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "cfstatement", Name: "cfstatement",
Aliases: []string{"cfs"}, Aliases: []string{"cfs"},
@ -15,11 +15,7 @@ var Cfstatement = internal.Register(MustNewLazyLexer(
NotMultiline: true, NotMultiline: true,
CaseInsensitive: true, CaseInsensitive: true,
}, },
cfstatementRules, Rules{
))
func cfstatementRules() Rules {
return Rules{
"root": { "root": {
{`//.*?\n`, CommentSingle, nil}, {`//.*?\n`, CommentSingle, nil},
{`/\*(?:.|\n)*?\*/`, CommentMultiline, nil}, {`/\*(?:.|\n)*?\*/`, CommentMultiline, nil},
@ -48,5 +44,5 @@ func cfstatementRules() Rules {
{`#`, LiteralStringDouble, nil}, {`#`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)}, {`"`, LiteralStringDouble, Pop(1)},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Coq lexer. // Coq lexer.
var Coq = internal.Register(MustNewLazyLexer( var Coq = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Coq", Name: "Coq",
Aliases: []string{"coq"}, Aliases: []string{"coq"},
Filenames: []string{"*.v"}, Filenames: []string{"*.v"},
MimeTypes: []string{"text/x-coq"}, MimeTypes: []string{"text/x-coq"},
}, },
coqRules, Rules{
))
func coqRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil}, {`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil},
@ -63,5 +59,5 @@ func coqRules() Rules {
{`[a-z][a-z0-9_\']*`, Name, Pop(1)}, {`[a-z][a-z0-9_\']*`, Name, Pop(1)},
Default(Pop(1)), Default(Pop(1)),
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// CPP lexer. // CPP lexer.
var CPP = internal.Register(MustNewLazyLexer( var CPP = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "C++", Name: "C++",
Aliases: []string{"cpp", "c++"}, Aliases: []string{"cpp", "c++"},
@ -14,11 +14,7 @@ var CPP = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"}, MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"},
EnsureNL: true, EnsureNL: true,
}, },
cppRules, Rules{
))
func cppRules() Rules {
return Rules{
"statements": { "statements": {
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil}, {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil},
{`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")}, {`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")},
@ -106,5 +102,5 @@ func cppRules() Rules {
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)}, {`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil}, {`.*?\n`, Comment, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// CassandraCQL lexer. // CassandraCQL lexer.
var CassandraCQL = internal.Register(MustNewLazyLexer( var CassandraCQL = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Cassandra CQL", Name: "Cassandra CQL",
Aliases: []string{"cassandra", "cql"}, Aliases: []string{"cassandra", "cql"},
@ -15,11 +15,7 @@ var CassandraCQL = internal.Register(MustNewLazyLexer(
NotMultiline: true, NotMultiline: true,
CaseInsensitive: true, CaseInsensitive: true,
}, },
cassandraCQLRules, Rules{
))
func cassandraCQLRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, TextWhitespace, nil}, {`\s+`, TextWhitespace, nil},
{`(--|\/\/).*\n?`, CommentSingle, nil}, {`(--|\/\/).*\n?`, CommentSingle, nil},
@ -27,8 +23,7 @@ func cassandraCQLRules() Rules {
{`(ascii|bigint|blob|boolean|counter|date|decimal|double|float|frozen|inet|int|list|map|set|smallint|text|time|timestamp|timeuuid|tinyint|tuple|uuid|varchar|varint)\b`, NameBuiltin, nil}, {`(ascii|bigint|blob|boolean|counter|date|decimal|double|float|frozen|inet|int|list|map|set|smallint|text|time|timestamp|timeuuid|tinyint|tuple|uuid|varchar|varint)\b`, NameBuiltin, nil},
{Words(``, `\b`, `ADD`, `AGGREGATE`, `ALL`, `ALLOW`, `ALTER`, `AND`, `ANY`, `APPLY`, `AS`, `ASC`, `AUTHORIZE`, `BATCH`, `BEGIN`, `BY`, `CLUSTERING`, `COLUMNFAMILY`, `COMPACT`, `CONSISTENCY`, `COUNT`, `CREATE`, `CUSTOM`, `DELETE`, `DESC`, `DISTINCT`, `DROP`, `EACH_QUORUM`, `ENTRIES`, `EXISTS`, `FILTERING`, `FROM`, `FULL`, `GRANT`, `IF`, `IN`, `INDEX`, `INFINITY`, `INSERT`, `INTO`, `KEY`, `KEYS`, `KEYSPACE`, `KEYSPACES`, `LEVEL`, `LIMIT`, `LOCAL_ONE`, `LOCAL_QUORUM`, `MATERIALIZED`, `MODIFY`, `NAN`, `NORECURSIVE`, `NOSUPERUSER`, `NOT`, `OF`, `ON`, `ONE`, `ORDER`, `PARTITION`, `PASSWORD`, `PER`, `PERMISSION`, `PERMISSIONS`, `PRIMARY`, `QUORUM`, `RENAME`, `REVOKE`, `SCHEMA`, `SELECT`, `STATIC`, `STORAGE`, `SUPERUSER`, `TABLE`, `THREE`, `TO`, `TOKEN`, `TRUNCATE`, `TTL`, `TWO`, `TYPE`, `UNLOGGED`, `UPDATE`, `USE`, `USER`, `USERS`, `USING`, `VALUES`, `VIEW`, `WHERE`, `WITH`, `WRITETIME`, `REPLICATION`, `OR`, `REPLACE`, `FUNCTION`, `CALLED`, `INPUT`, `RETURNS`, `LANGUAGE`, `ROLE`, `ROLES`, `TRIGGER`, `DURABLE_WRITES`, `LOGIN`, `OPTIONS`, `LOGGED`, `SFUNC`, `STYPE`, `FINALFUNC`, `INITCOND`, `IS`, `CONTAINS`, `JSON`, `PAGING`, `OFF`), Keyword, nil}, {Words(``, `\b`, `ADD`, `AGGREGATE`, `ALL`, `ALLOW`, `ALTER`, `AND`, `ANY`, `APPLY`, `AS`, `ASC`, `AUTHORIZE`, `BATCH`, `BEGIN`, `BY`, `CLUSTERING`, `COLUMNFAMILY`, `COMPACT`, `CONSISTENCY`, `COUNT`, `CREATE`, `CUSTOM`, `DELETE`, `DESC`, `DISTINCT`, `DROP`, `EACH_QUORUM`, `ENTRIES`, `EXISTS`, `FILTERING`, `FROM`, `FULL`, `GRANT`, `IF`, `IN`, `INDEX`, `INFINITY`, `INSERT`, `INTO`, `KEY`, `KEYS`, `KEYSPACE`, `KEYSPACES`, `LEVEL`, `LIMIT`, `LOCAL_ONE`, `LOCAL_QUORUM`, `MATERIALIZED`, `MODIFY`, `NAN`, `NORECURSIVE`, `NOSUPERUSER`, `NOT`, `OF`, `ON`, `ONE`, `ORDER`, `PARTITION`, `PASSWORD`, `PER`, `PERMISSION`, `PERMISSIONS`, `PRIMARY`, `QUORUM`, `RENAME`, `REVOKE`, `SCHEMA`, `SELECT`, `STATIC`, `STORAGE`, `SUPERUSER`, `TABLE`, `THREE`, `TO`, `TOKEN`, `TRUNCATE`, `TTL`, `TWO`, `TYPE`, `UNLOGGED`, `UPDATE`, `USE`, `USER`, `USERS`, 
`USING`, `VALUES`, `VIEW`, `WHERE`, `WITH`, `WRITETIME`, `REPLICATION`, `OR`, `REPLACE`, `FUNCTION`, `CALLED`, `INPUT`, `RETURNS`, `LANGUAGE`, `ROLE`, `ROLES`, `TRIGGER`, `DURABLE_WRITES`, `LOGIN`, `OPTIONS`, `LOGGED`, `SFUNC`, `STYPE`, `FINALFUNC`, `INITCOND`, `IS`, `CONTAINS`, `JSON`, `PAGING`, `OFF`), Keyword, nil},
{"[+*/<>=~!@#%^&|`?-]+", Operator, nil}, {"[+*/<>=~!@#%^&|`?-]+", Operator, nil},
{ {`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
UsingByGroup( UsingByGroup(
internal.Get, internal.Get,
1, 6, 1, 6,
@ -70,5 +65,5 @@ func cassandraCQLRules() Rules {
{`[^\$]+`, LiteralStringHeredoc, nil}, {`[^\$]+`, LiteralStringHeredoc, nil},
{`\$\$`, LiteralStringHeredoc, Pop(1)}, {`\$\$`, LiteralStringHeredoc, Pop(1)},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Crystal lexer. // Crystal lexer.
var Crystal = internal.Register(MustNewLazyLexer( var Crystal = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Crystal", Name: "Crystal",
Aliases: []string{"cr", "crystal"}, Aliases: []string{"cr", "crystal"},
@ -14,11 +14,7 @@ var Crystal = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-crystal"}, MimeTypes: []string{"text/x-crystal"},
DotAll: true, DotAll: true,
}, },
crystalRules, Rules{
))
func crystalRules() Rules {
return Rules{
"root": { "root": {
{`#.*?$`, CommentSingle, nil}, {`#.*?$`, CommentSingle, nil},
{Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil}, {Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil},
@ -262,5 +258,5 @@ func crystalRules() Rules {
{`[\\#<>]`, LiteralStringRegex, nil}, {`[\\#<>]`, LiteralStringRegex, nil},
{`[^\\#<>]+`, LiteralStringRegex, nil}, {`[^\\#<>]+`, LiteralStringRegex, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// CSharp lexer. // CSharp lexer.
var CSharp = internal.Register(MustNewLazyLexer( var CSharp = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "C#", Name: "C#",
Aliases: []string{"csharp", "c#"}, Aliases: []string{"csharp", "c#"},
@ -15,11 +15,7 @@ var CSharp = internal.Register(MustNewLazyLexer(
DotAll: true, DotAll: true,
EnsureNL: true, EnsureNL: true,
}, },
cSharpRules, Rules{
))
func cSharpRules() Rules {
return Rules{
"root": { "root": {
{`^\s*\[.*?\]`, NameAttribute, nil}, {`^\s*\[.*?\]`, NameAttribute, nil},
{`[^\S\n]+`, Text, nil}, {`[^\S\n]+`, Text, nil},
@ -33,7 +29,7 @@ func cSharpRules() Rules {
{`\$@?"(""|[^"])*"`, LiteralString, nil}, {`\$@?"(""|[^"])*"`, LiteralString, nil},
{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, {`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
{`'\\.'|'[^\\]'`, LiteralStringChar, nil}, {`'\\.'|'[^\\]'`, LiteralStringChar, nil},
{`0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?`, LiteralNumber, nil}, {`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil},
{`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil}, {`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
{`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil}, {`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil},
{`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|interface|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil}, {`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|interface|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil},
@ -51,5 +47,5 @@ func cSharpRules() Rules {
{`(?=\()`, Text, Pop(1)}, {`(?=\()`, Text, Pop(1)},
{`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)}, {`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// CSS lexer. // CSS lexer.
var CSS = internal.Register(MustNewLazyLexer( var CSS = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "CSS", Name: "CSS",
Aliases: []string{"css"}, Aliases: []string{"css"},
Filenames: []string{"*.css"}, Filenames: []string{"*.css"},
MimeTypes: []string{"text/css"}, MimeTypes: []string{"text/css"},
}, },
cssRules, Rules{
))
func cssRules() Rules {
return Rules{
"root": { "root": {
Include("basics"), Include("basics"),
}, },
@ -43,18 +39,6 @@ func cssRules() Rules {
Include("basics"), Include("basics"),
{`\}`, Punctuation, Pop(2)}, {`\}`, Punctuation, Pop(2)},
}, },
"atparenthesis": {
Include("common-values"),
{`/\*(?:.|\n)*?\*/`, Comment, nil},
Include("numeric-values"),
{`[*+/-]`, Operator, nil},
{`[,]`, Punctuation, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
{`[a-zA-Z_-]\w*`, Name, nil},
{`\(`, Punctuation, Push("atparenthesis")},
{`\)`, Punctuation, Pop(1)},
},
"content": { "content": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`\}`, Punctuation, Pop(1)}, {`\}`, Punctuation, Pop(1)},
@ -89,7 +73,6 @@ func cssRules() Rules {
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
{`[a-zA-Z_-]\w*`, Name, nil}, {`[a-zA-Z_-]\w*`, Name, nil},
{`\(`, Punctuation, Push("atparenthesis")},
{`\)`, Punctuation, Pop(1)}, {`\)`, Punctuation, Pop(1)},
}, },
"common-values": { "common-values": {
@ -117,5 +100,5 @@ func cssRules() Rules {
{`%`, KeywordType, nil}, {`%`, KeywordType, nil},
Default(Pop(1)), Default(Pop(1)),
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Cython lexer. // Cython lexer.
var Cython = internal.Register(MustNewLazyLexer( var Cython = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Cython", Name: "Cython",
Aliases: []string{"cython", "pyx", "pyrex"}, Aliases: []string{"cython", "pyx", "pyrex"},
Filenames: []string{"*.pyx", "*.pxd", "*.pxi"}, Filenames: []string{"*.pyx", "*.pxd", "*.pxi"},
MimeTypes: []string{"text/x-cython", "application/x-cython"}, MimeTypes: []string{"text/x-cython", "application/x-cython"},
}, },
cythonRules, Rules{
))
func cythonRules() Rules {
return Rules{
"root": { "root": {
{`\n`, Text, nil}, {`\n`, Text, nil},
{`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil}, {`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil},
@ -135,5 +131,5 @@ func cythonRules() Rules {
Include("strings"), Include("strings"),
Include("nl"), Include("nl"),
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// PHP lexer for pure PHP code (not embedded in HTML). // PHP lexer for pure PHP code (not embedded in HTML).
var PHP = internal.Register(MustNewLazyLexer( var PHP = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "PHP", Name: "PHP",
Aliases: []string{"php", "php3", "php4", "php5"}, Aliases: []string{"php", "php3", "php4", "php5"},
@ -16,71 +16,65 @@ var PHP = internal.Register(MustNewLazyLexer(
CaseInsensitive: true, CaseInsensitive: true,
EnsureNL: true, EnsureNL: true,
}, },
phpRules, phpCommonRules.Rename("php", "root"),
)) ))
func phpRules() Rules { var phpCommonRules = Rules{
return phpCommonRules().Rename("php", "root") "php": {
} {`\?>`, CommentPreproc, Pop(1)},
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
func phpCommonRules() Rules { {`\s+`, Text, nil},
return Rules{ {`#.*?\n`, CommentSingle, nil},
"php": { {`//.*?\n`, CommentSingle, nil},
{`\?>`, CommentPreproc, Pop(1)}, {`/\*\*/`, CommentMultiline, nil},
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil}, {`/\*\*.*?\*/`, LiteralStringDoc, nil},
{`\s+`, Text, nil}, {`/\*.*?\*/`, CommentMultiline, nil},
{`#.*?\n`, CommentSingle, nil}, {`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
{`//.*?\n`, CommentSingle, nil}, {`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
{`/\*\*/`, CommentMultiline, nil}, {`\?`, Operator, nil},
{`/\*\*.*?\*/`, LiteralStringDoc, nil}, {`[\[\]{}();,]+`, Punctuation, nil},
{`/\*.*?\*/`, CommentMultiline, nil}, {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil}, {`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
{`[~!%^&*+=|:.<>/@-]+`, Operator, nil}, {`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
{`\?`, Operator, nil}, {`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
{`[\[\]{}();,]+`, Punctuation, nil}, {`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, {`(true|false|null)\b`, KeywordConstant, nil},
{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil}, Include("magicconstants"),
{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")}, {`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil}, {`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil}, {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
{`(true|false|null)\b`, KeywordConstant, nil}, {`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
Include("magicconstants"), {`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil}, {`0[0-7]+`, LiteralNumberOct, nil},
{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil}, {`0x[a-f0-9]+`, LiteralNumberHex, nil},
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil}, {`\d+`, LiteralNumberInteger, nil},
{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, {`0b[01]+`, LiteralNumberBin, nil},
{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil}, {`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
{`0[0-7]+`, LiteralNumberOct, nil}, {"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
{`0x[a-f0-9_]+`, LiteralNumberHex, nil}, {`"`, LiteralStringDouble, Push("string")},
{`[\d_]+`, LiteralNumberInteger, nil}, },
{`0b[01]+`, LiteralNumberBin, nil}, "magicfuncs": {
{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil}, {Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil}, },
{`"`, LiteralStringDouble, Push("string")}, "magicconstants": {
}, {Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
"magicfuncs": { },
{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil}, "classname": {
}, {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
"magicconstants": { },
{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil}, "functionname": {
}, Include("magicfuncs"),
"classname": { {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)}, Default(Pop(1)),
}, },
"functionname": { "string": {
Include("magicfuncs"), {`"`, LiteralStringDouble, Pop(1)},
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)}, {`[^{$"\\]+`, LiteralStringDouble, nil},
Default(Pop(1)), {`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
}, {`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
"string": { {`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`"`, LiteralStringDouble, Pop(1)}, {`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`[^{$"\\]+`, LiteralStringDouble, nil}, {`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil}, {`[${\\]`, LiteralStringDouble, nil},
{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil}, },
{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
{`[${\\]`, LiteralStringDouble, nil},
},
}
} }

View File

@ -9,7 +9,7 @@ import (
) )
// PHTML lexer is PHP in HTML. // PHTML lexer is PHP in HTML.
var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer( var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
&Config{ &Config{
Name: "PHTML", Name: "PHTML",
Aliases: []string{"phtml"}, Aliases: []string{"phtml"},
@ -19,20 +19,16 @@ var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
CaseInsensitive: true, CaseInsensitive: true,
EnsureNL: true, EnsureNL: true,
}, },
phtmlRules, Rules{
"root": {
{`<\?(php)?`, CommentPreproc, Push("php")},
{`[^<]+`, Other, nil},
{`<`, Other, nil},
},
}.Merge(phpCommonRules),
).SetAnalyser(func(text string) float32 { ).SetAnalyser(func(text string) float32 {
if strings.Contains(text, "<?php") { if strings.Contains(text, "<?php") {
return 0.5 return 0.5
} }
return 0.0 return 0.0
}))) })))
func phtmlRules() Rules {
return Rules{
"root": {
{`<\?(php)?`, CommentPreproc, Push("php")},
{`[^<]+`, Other, nil},
{`<`, Other, nil},
},
}.Merge(phpCommonRules())
}

View File

@ -6,7 +6,7 @@ import (
) )
// D lexer. https://dlang.org/spec/lex.html // D lexer. https://dlang.org/spec/lex.html
var D = internal.Register(MustNewLazyLexer( var D = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "D", Name: "D",
Aliases: []string{"d"}, Aliases: []string{"d"},
@ -14,11 +14,7 @@ var D = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-d"}, MimeTypes: []string{"text/x-d"},
EnsureNL: true, EnsureNL: true,
}, },
dRules, Rules{
))
func dRules() Rules {
return Rules{
"root": { "root": {
{`[^\S\n]+`, Text, nil}, {`[^\S\n]+`, Text, nil},
@ -69,5 +65,5 @@ func dRules() Rules {
"import": { "import": {
{`[\w.]+\*?`, NameNamespace, Pop(1)}, {`[\w.]+\*?`, NameNamespace, Pop(1)},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Dart lexer. // Dart lexer.
var Dart = internal.Register(MustNewLazyLexer( var Dart = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Dart", Name: "Dart",
Aliases: []string{"dart"}, Aliases: []string{"dart"},
@ -14,11 +14,7 @@ var Dart = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-dart"}, MimeTypes: []string{"text/x-dart"},
DotAll: true, DotAll: true,
}, },
dartRules, Rules{
))
func dartRules() Rules {
return Rules{
"root": { "root": {
Include("string_literal"), Include("string_literal"),
{`#!(.*?)$`, CommentPreproc, nil}, {`#!(.*?)$`, CommentPreproc, nil},
@ -91,5 +87,5 @@ func dartRules() Rules {
Include("string_common"), Include("string_common"),
{`(\$|\')+`, LiteralStringSingle, nil}, {`(\$|\')+`, LiteralStringSingle, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Diff lexer. // Diff lexer.
var Diff = internal.Register(MustNewLazyLexer( var Diff = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Diff", Name: "Diff",
Aliases: []string{"diff", "udiff"}, Aliases: []string{"diff", "udiff"},
@ -14,11 +14,7 @@ var Diff = internal.Register(MustNewLazyLexer(
Filenames: []string{"*.diff", "*.patch"}, Filenames: []string{"*.diff", "*.patch"},
MimeTypes: []string{"text/x-diff", "text/x-patch"}, MimeTypes: []string{"text/x-diff", "text/x-patch"},
}, },
diffRules, Rules{
))
func diffRules() Rules {
return Rules{
"root": { "root": {
{` .*\n`, Text, nil}, {` .*\n`, Text, nil},
{`\+.*\n`, GenericInserted, nil}, {`\+.*\n`, GenericInserted, nil},
@ -29,5 +25,5 @@ func diffRules() Rules {
{`=.*\n`, GenericHeading, nil}, {`=.*\n`, GenericHeading, nil},
{`.*\n`, Text, nil}, {`.*\n`, Text, nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Django/Jinja lexer. // Django/Jinja lexer.
var DjangoJinja = internal.Register(MustNewLazyLexer( var DjangoJinja = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Django/Jinja", Name: "Django/Jinja",
Aliases: []string{"django", "jinja"}, Aliases: []string{"django", "jinja"},
@ -14,11 +14,7 @@ var DjangoJinja = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"application/x-django-templating", "application/x-jinja"}, MimeTypes: []string{"application/x-django-templating", "application/x-jinja"},
DotAll: true, DotAll: true,
}, },
djangoJinjaRules, Rules{
))
func djangoJinjaRules() Rules {
return Rules{
"root": { "root": {
{`[^{]+`, Other, nil}, {`[^{]+`, Other, nil},
{`\{\{`, CommentPreproc, Push("var")}, {`\{\{`, CommentPreproc, Push("var")},
@ -53,5 +49,5 @@ func djangoJinjaRules() Rules {
Include("varnames"), Include("varnames"),
{`.`, Punctuation, nil}, {`.`, Punctuation, nil},
}, },
} },
} ))

View File

@ -8,7 +8,7 @@ import (
) )
// Docker lexer. // Docker lexer.
var Docker = internal.Register(MustNewLazyLexer( var Docker = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Docker", Name: "Docker",
Aliases: []string{"docker", "dockerfile"}, Aliases: []string{"docker", "dockerfile"},
@ -16,11 +16,7 @@ var Docker = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"text/x-dockerfile-config"}, MimeTypes: []string{"text/x-dockerfile-config"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
dockerRules, Rules{
))
func dockerRules() Rules {
return Rules{
"root": { "root": {
{`#.*`, Comment, nil}, {`#.*`, Comment, nil},
{`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using(b.Bash)), nil}, {`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using(b.Bash)), nil},
@ -31,5 +27,5 @@ func dockerRules() Rules {
{`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil}, {`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil},
{`(.*\\\n)*.+`, Using(b.Bash), nil}, {`(.*\\\n)*.+`, Using(b.Bash), nil},
}, },
} },
} ))

View File

@ -6,7 +6,7 @@ import (
) )
// Dtd lexer. // Dtd lexer.
var Dtd = internal.Register(MustNewLazyLexer( var Dtd = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "DTD", Name: "DTD",
Aliases: []string{"dtd"}, Aliases: []string{"dtd"},
@ -14,11 +14,7 @@ var Dtd = internal.Register(MustNewLazyLexer(
MimeTypes: []string{"application/xml-dtd"}, MimeTypes: []string{"application/xml-dtd"},
DotAll: true, DotAll: true,
}, },
dtdRules, Rules{
))
func dtdRules() Rules {
return Rules{
"root": { "root": {
Include("common"), Include("common"),
{`(<!ELEMENT)(\s+)(\S+)`, ByGroups(Keyword, Text, NameTag), Push("element")}, {`(<!ELEMENT)(\s+)(\S+)`, ByGroups(Keyword, Text, NameTag), Push("element")},
@ -69,5 +65,5 @@ func dtdRules() Rules {
{`[^>\s|()?+*,]+`, NameAttribute, nil}, {`[^>\s|()?+*,]+`, NameAttribute, nil},
{`>`, Keyword, Pop(1)}, {`>`, Keyword, Pop(1)},
}, },
} },
} ))

View File

@ -1,74 +0,0 @@
package d
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Dylan lexer.
//
// Regex-based lexer definition for the Dylan language, registered with the
// chroma lexer registry. States: "root" (header/comments), "code" (main),
// plus sub-states for block comments, symbol literals, and strings.
var Dylan = internal.Register(MustNewLexer(
&Config{
Name: "Dylan",
Aliases: []string{"dylan"},
Filenames: []string{"*.dylan", "*.dyl", "*.intr"},
MimeTypes: []string{"text/x-dylan"},
CaseInsensitive: true,
},
Rules{
// "root": skips whitespace and line comments, matches
// "keyword: value" header-style lines (including indented
// continuation lines), then falls through to "code" for
// everything else.
"root": {
{`\s+`, Whitespace, nil},
{`//.*?\n`, CommentSingle, nil},
// Attribute line: name, separating whitespace, value with
// optional continuation lines that start with a space/tab.
{`([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Whitespace, LiteralString), nil},
Default(Push("code")),
},
// "code": the main tokenising state for Dylan source.
"code": {
{`\s+`, Whitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("comment")},
{`"`, LiteralString, Push("string")},
// Character literal: escaped char, octal, or hex escape, or a
// single plain character.
{`'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil},
// Numeric literals: #b binary, #o octal, floats, decimal
// integers, #x hexadecimal.
{`#b[01]+`, LiteralNumberBin, nil},
{`#o[0-7]+`, LiteralNumberOct, nil},
{`[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)`, LiteralNumberFloat, nil},
{`[-+]?\d+`, LiteralNumberInteger, nil},
{`#x[0-9a-f]+`, LiteralNumberHex, nil},
// Pattern variables with a constraint suffix,
// e.g. ?name:expression (presumably macro syntax — the
// constraint names match Dylan macro constraint keywords).
{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)`,
ByGroups(Operator, NameVariable, Operator, NameBuiltin), nil},
{`(\?)(:)(token|name|variable|expression|body|case-body|\*)`,
ByGroups(Operator, Operator, NameVariable), nil},
{`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, NameVariable), nil},
{`(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])`, Punctuation, nil},
{`:=`, Operator, nil},
// Boolean literals #t / #f.
{`#[tf]`, Literal, nil},
// #" opens a symbol literal handled by the "symbol" state.
{`#"`, LiteralStringSymbol, Push("symbol")},
{`#[a-z0-9-]+`, Keyword, nil},
{`#(all-keys|include|key|next|rest)`, Keyword, nil},
// Dylan identifiers may contain many symbol characters;
// a trailing ":" marks keyword syntax, <...> a class name,
// *...* a global variable, $... a constant.
{`[\w!&*<>|^$%@+~?/=-]+:`, KeywordConstant, nil},
{`<[\w!&*<>|^$%@+~?/=-]+>`, NameClass, nil},
{`\*[\w!&*<>|^$%@+~?/=-]+\*`, NameVariableGlobal, nil},
{`\$[\w!&*<>|^$%@+~?/=-]+`, NameConstant, nil},
{`(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(NameBuiltin, Whitespace, NameVariable), nil},
{`(error|signal|return|break)`, NameException, nil},
// Fallback: optional leading "\" escape then a bare name.
{`(\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, Name), nil},
},
// Block comments nest: "/*" pushes this state again (argument-less
// Push() re-enters the current state), "*/" pops one level.
"comment": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
// Symbol literal body, terminated by a closing quote.
"symbol": {
{`"`, LiteralStringSymbol, Pop(1)},
{`[^\\"]+`, LiteralStringSymbol, nil},
},
// Double-quoted string: escape sequences, plain runs, escaped
// newline (line continuation), and a lone backslash fallback.
"string": {
{`"`, LiteralString, Pop(1)},
{`\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil},
{`[^\\"\n]+`, LiteralString, nil},
{`\\\n`, LiteralString, nil},
{`\\`, LiteralString, nil},
},
},
))

View File

@ -6,18 +6,14 @@ import (
) )
// Ebnf lexer. // Ebnf lexer.
var Ebnf = internal.Register(MustNewLazyLexer( var Ebnf = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "EBNF", Name: "EBNF",
Aliases: []string{"ebnf"}, Aliases: []string{"ebnf"},
Filenames: []string{"*.ebnf"}, Filenames: []string{"*.ebnf"},
MimeTypes: []string{"text/x-ebnf"}, MimeTypes: []string{"text/x-ebnf"},
}, },
ebnfRules, Rules{
))
func ebnfRules() Rules {
return Rules{
"root": { "root": {
Include("whitespace"), Include("whitespace"),
Include("comment_start"), Include("comment_start"),
@ -51,5 +47,5 @@ func ebnfRules() Rules {
"identifier": { "identifier": {
{`([a-zA-Z][\w \-]*)`, Keyword, nil}, {`([a-zA-Z][\w \-]*)`, Keyword, nil},
}, },
} },
} ))

View File

@ -6,18 +6,14 @@ import (
) )
// Elixir lexer. // Elixir lexer.
var Elixir = internal.Register(MustNewLazyLexer( var Elixir = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Elixir", Name: "Elixir",
Aliases: []string{"elixir", "ex", "exs"}, Aliases: []string{"elixir", "ex", "exs"},
Filenames: []string{"*.ex", "*.exs"}, Filenames: []string{"*.ex", "*.exs"},
MimeTypes: []string{"text/x-elixir"}, MimeTypes: []string{"text/x-elixir"},
}, },
elixirRules, Rules{
))
func elixirRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`#.*$`, CommentSingle, nil}, {`#.*$`, CommentSingle, nil},
@ -32,13 +28,6 @@ func elixirRules() Rules {
{`:"`, LiteralStringSymbol, Push("string_double_atom")}, {`:"`, LiteralStringSymbol, Push("string_double_atom")},
{`:'`, LiteralStringSymbol, Push("string_single_atom")}, {`:'`, LiteralStringSymbol, Push("string_single_atom")},
{`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, {`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil},
{`(fn|do|end|after|else|rescue|catch)\b`, Keyword, nil},
{`(not|and|or|when|in)\b`, OperatorWord, nil},
{`(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b`, Keyword, nil},
{`(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b`, KeywordDeclaration, nil},
{`(import|require|use|alias)\b`, KeywordNamespace, nil},
{`(nil|true|false)\b`, NameConstant, nil},
{`(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b`, NamePseudo, nil},
{`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil}, {`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil},
{`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil}, {`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil},
{`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil}, {`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil},
@ -277,5 +266,5 @@ func elixirRules() Rules {
{`\\.`, LiteralStringOther, nil}, {`\\.`, LiteralStringOther, nil},
{`'[a-zA-Z]*`, LiteralStringOther, Pop(1)}, {`'[a-zA-Z]*`, LiteralStringOther, Pop(1)},
}, },
} },
} ))

Some files were not shown because too many files have changed in this diff Show More