Compare commits

...

48 Commits
3.9.0 ... 4.2.0

Author SHA1 Message Date
883a17092f Merge pull request #606 from chrisallenlane/4.2.0
4.2.0
2020-11-28 11:27:09 -05:00
4f2a57fce8 fix(view): whitespace corrections
- Fix a bug whereby the `--all` flag would conflict with the pager

- Fix whitespace inconsistencies between the view and search outputs
2020-11-28 11:18:16 -05:00
ecc96c64f9 refactor(installer): externalize installer
Move installation-related code out of `main.go` and into a new
`installer.Run` method.
2020-11-28 10:32:37 -05:00
a81dd96ff4 fix: rename display.go
Rename `display.go` to `write.go`. (I forgot to do this previously.)
2020-11-27 23:05:02 -05:00
fb538baba5 chore(version): bump to 4.2.0 2020-11-27 22:57:25 -05:00
1a7b5c6127 feat(display): make Faint respect Colorize
Make `display.Faint` respect the `Colorize` config value.
2020-11-27 22:50:55 -05:00
cdddfbb516 chore: rename display.Display
Rename `display.Display` to `display.Write` for clarity and to reduce
"stutter".
2020-11-27 22:35:24 -05:00
4ef4c35d8c feat(search): search all cheatpaths
Update the search function. It now searches all cheatpaths all the time,
as if `--all` were implicitly passed.
2020-11-27 22:31:16 -05:00
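For example (the search phrase below is only an illustrative assumption), a search now covers every configured cheatpath without any extra flag:

```sh
# Search all configured cheatpaths for a phrase; previously only the
# consolidated (most local) copy of each sheet was searched.
cheat -s ssh
```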
a58294859e chore: spelling
`s/pathSheets/pathsheets/g` in `cmd_list` for consistency with naming elsewhere.
2020-11-27 22:26:14 -05:00
606092e288 feat(search): improve search output formatting
Improve the search output formatting.
2020-11-27 17:06:02 -05:00
233a9de1aa feat: implement --all flag
Implement an `--all` flag that can be used to view cheatsheets on all
cheatpaths. (Resolves #548)
2020-11-27 16:39:34 -05:00
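A minimal usage sketch (the `tar` sheet name is only an assumed example):

```sh
# View the `tar` cheatsheet from every cheatpath that provides one,
# rather than only the most local copy.
cheat -a tar
```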
aa16f68620 feat(display): add methods to display
- Add `indent`, `faint`, and `underline` methods to `display`
- Add tests for the above
2020-11-27 16:14:33 -05:00
367673d5d9 chore(dependencies): update dependencies
Run `make vendor-update`.
2020-11-27 09:51:39 -05:00
08fb9e11a9 feat(Makefile): add vendor-update
Add `vendor-update` target to `Makefile`, which updates all dependencies
to their newest versions.
2020-11-27 09:50:11 -05:00
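The new target simply wraps `go get`; following it with the existing `vendor` target to refresh `vendor/` is an assumption about the intended workflow:

```sh
# Update all dependencies to their newest versions...
make vendor-update
# ...then re-vendor and tidy them (pre-existing target).
make vendor
```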
3f4d4bddb2 feat(tests): add unit-tests
Add unit-tests for `sheets.Load`.
2020-11-11 19:33:31 -05:00
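The new tests run with the standard Go tooling, e.g.:

```sh
# Run the full test suite, including the new sheets.Load tests.
go test ./...
```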
6c6753b35c Merge pull request #599 from chrisallenlane/issue-597
fix: update installation instructions in README
2020-11-07 18:56:37 -05:00
0718b606e1 fix(README): clarify installation verbiage
Update the installation verbiage in the `README` for clarity
(issue #597).
2020-11-07 18:48:24 -05:00
857119b443 feat(Docker): create development Docker image
- Create Docker image to be used for experimentation during development
- Create targets in `Makefile` pertaining to the above
2020-11-07 18:47:24 -05:00
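With the new `Makefile` targets (shown later in this diff), the sandbox can be built and entered roughly like so:

```sh
# Build the development image, then open a shell inside the container.
make docker-setup
make docker-sh
```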
f421483eea Merge pull request #596 from chrisallenlane/v4.1.1
v4.1.1
2020-11-03 18:32:25 -05:00
4adddbf504 chore: bump version to v4.1.1 2020-11-03 18:05:46 -05:00
b9c86b6975 chore(dependencies): update dependencies 2020-11-03 17:59:56 -05:00
0b21ccf6f8 feat(tests): improve test coverage 2020-11-03 17:29:49 -05:00
a3ad8c5101 Merge pull request #595 from chrisallenlane/codeql
feat: integrate CodeQL build action
2020-11-01 10:51:38 -05:00
bacb74929a feat: integrate CodeQL build action 2020-11-01 10:47:25 -05:00
82e1c27494 Merge pull request #588 from chrisallenlane/bare-tag
feat: implement `cheat -t` shorthand
2020-09-05 09:05:09 -04:00
45beeb2edb chore: bump version to 4.1.0 2020-09-05 08:56:51 -04:00
c2c479b36c feat: support -t shorthand
Make `cheat -t <tag>` function as a shorthand for `cheat -l -t <tag>`.
2020-09-02 17:17:44 -04:00
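In other words (using an assumed `personal` tag), the two invocations below are now equivalent:

```sh
# List all cheatsheets tagged `personal`; the first form is the new shorthand.
cheat -t personal
cheat -l -t personal
```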
cb0243e7fc Merge pull request #580 from ryaanwells/patch-1
Fixing "cheetsheet" typo in README.md
2020-08-23 15:25:55 -04:00
e5d04d41ea Fixing "cheetsheet" typo in tags_test.go 2020-08-21 15:45:49 +01:00
2474ea4fb1 Fixing "cheetsheet" typo in README.md 2020-08-18 14:53:15 +01:00
7467c9fbc0 Merge pull request #578 from chrisallenlane/v4.0.3
chore: update dependencies
2020-08-08 13:45:10 -04:00
dfba3da003 chore: update dependencies 2020-08-08 10:29:29 -04:00
ad7ad64a75 Merge pull request #573 from chrisallenlane/linux-conf
fix(config): add /etc/cheat config path
2020-07-11 08:12:04 -04:00
c4dcfd5da0 fix(config): add /etc/cheat config path
Add `/etc/cheat/conf.yml` to default config paths. See #568 for context.
2020-07-09 18:28:10 -04:00
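A hedged sketch of how a machine-wide config might be placed on the new path (the exact workflow is an assumption, not part of this change):

```sh
# Install a system-wide config that individual users can fall back to.
sudo mkdir -p /etc/cheat
cheat --init | sudo tee /etc/cheat/conf.yml > /dev/null
```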
278a5d9154 Merge pull request #570 from cheat/v4.0.1
fix(search): fix pagination error
2020-06-30 07:26:30 -04:00
9fa0c466fd fix(search): fix pagination error
Fix the paginator when used in combination with the `-s` (search)
subcommand. Previously, it would not behave as intended, because `cheat`
was writing to `stdout` at inappropriate times.
2020-06-30 07:21:21 -04:00
4e9b2928b3 Merge pull request #569 from chrisallenlane/dev
v4.0.0
2020-06-25 19:05:37 -04:00
fa5eb44be8 chore: bump version to 4.0.0 2020-06-25 18:53:27 -04:00
49afd7c16b feat: modify return codes
Modify exit codes: `cheat` now returns exit code `2` when a requested
cheatsheet cannot be found.

BREAKING CHANGE
2020-06-25 18:38:03 -04:00
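A rough sketch of how a script might rely on the new exit codes (the sheet name is an assumed example):

```sh
# Exit code 2 now specifically means "cheatsheet not found".
cheat some-missing-sheet
if [ $? -eq 2 ]; then
  echo "no such cheatsheet" >&2
fi
```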
59d5c96c24 feat(pagination): implement paginated output
Implement a `pager` config option. If configured, `cheat` will
automatically pipe output through the configured pager (where
appropriate).
2020-06-25 18:21:51 -04:00
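The `pager` key lands in the default config template (visible further down in this diff). One way to regenerate that config on a typical XDG setup (the paths are an assumption for Linux):

```sh
# Write a fresh default config, which now includes `pager: less -FRX`.
mkdir -p ~/.config/cheat
cheat --init > ~/.config/cheat/conf.yml
```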
8e602b0e93 Merge pull request #563 from chrisallenlane/3.10.1
feat(makefile): support 32-bit systems
2020-05-14 20:11:14 -04:00
fb04cb1fcd feat(makefile): support 32-bit systems
- Update the `Makefile` to additionally output a 386 binary (#562)
- Update the dependencies (#561)
- Bump version
2020-05-14 20:02:35 -04:00
d42726101e Merge pull request #560 from cheat/3.10.0
3.10.0
2020-05-11 20:20:09 -04:00
93b3a711f5 chore: bump version to 3.10.0 2020-05-11 20:15:26 -04:00
9c3d41c8bd Merge branch 'syohex-syohex/improve-completion' into 3.10.0 2020-05-11 20:13:58 -04:00
4eeec6c868 chore(deps): resolve dependency conflict 2020-05-11 20:12:28 -04:00
1b17ab1914 chore(deps): bump github.com/alecthomas/chroma from 0.7.1 to 0.7.3
Bumps [github.com/alecthomas/chroma](https://github.com/alecthomas/chroma) from 0.7.1 to 0.7.3.
- [Release notes](https://github.com/alecthomas/chroma/releases)
- [Changelog](https://github.com/alecthomas/chroma/blob/master/.goreleaser.yml)
- [Commits](https://github.com/alecthomas/chroma/compare/v0.7.1...v0.7.3)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2020-05-11 07:20:59 +00:00
477650ee44 Improve zsh completion 2020-04-12 11:03:08 +09:00
302 changed files with 10269 additions and 7740 deletions

66
.github/workflows/codeql-analysis.yml vendored Normal file
View File

@@ -0,0 +1,66 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
# ******** NOTE ********
name: "CodeQL"
on:
push:
branches: [ master ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '45 23 * * 0'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
language: [ 'go' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more...
# https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
#- name: Autobuild
#uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
- run: make
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1

8
Dockerfile Normal file
View File

@@ -0,0 +1,8 @@
# NB: this image isn't used anywhere in the build pipeline. It exists to
# conveniently facilitate ad-hoc experimentation in a sandboxed environment
# during development.
FROM golang:1.15-alpine
RUN apk add git less make
WORKDIR /app

View File

@@ -7,6 +7,7 @@ dist_dir := ./dist
 CAT    := cat
 COLUMN := column
 CTAGS  := ctags
+DOCKER := docker
 GO     := go
 GREP   := grep
 GZIP   := gzip --best
@@ -20,6 +21,8 @@ SED := sed
 SORT   := sort
 ZIP    := zip -m

+docker_image := cheat-devel:latest
+
 # build flags
 BUILD_FLAGS := -ldflags="-s -w" -mod vendor -trimpath
 GOBIN       :=
@@ -28,6 +31,7 @@ TMPDIR := /tmp
 # release binaries
 releases := \
     $(dist_dir)/cheat-darwin-amd64 \
+    $(dist_dir)/cheat-linux-386 \
     $(dist_dir)/cheat-linux-amd64 \
     $(dist_dir)/cheat-linux-arm5 \
     $(dist_dir)/cheat-linux-arm6 \
@@ -52,6 +56,11 @@ $(dist_dir)/cheat-darwin-amd64: prepare
     GOARCH=amd64 GOOS=darwin \
     $(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz

+# cheat-linux-386
+$(dist_dir)/cheat-linux-386: prepare
+    GOARCH=386 GOOS=linux \
+    $(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
+
 # cheat-linux-amd64
 $(dist_dir)/cheat-linux-amd64: prepare
     GOARCH=amd64 GOOS=linux \
@@ -99,6 +108,7 @@ clean: $(dist_dir)
 .PHONY: distclean
 distclean:
     $(RM) -f tags
+    @$(DOCKER) image rm -f $(docker_image)

 ## setup: install revive (linter) and scc (sloc tool)
 .PHONY: setup
@@ -126,6 +136,10 @@ man:
 vendor:
     $(GO) mod vendor && $(GO) mod tidy && $(GO) mod verify

+## vendor-update: update vendored dependencies
+vendor-update:
+    $(GO) get -t -u ./...
+
 ## fmt: run go fmt
 .PHONY: fmt
 fmt:
@@ -159,6 +173,16 @@ check: | vendor fmt lint vet test
 .PHONY: prepare
 prepare: | $(dist_dir) clean generate vendor fmt lint vet test

+## docker-setup: create a docker image for use during development
+.PHONY: docker-setup
+docker-setup:
+    $(DOCKER) build -t $(docker_image) -f Dockerfile .
+
+## docker-sh: shell into the docker development container
+.PHONY: docker-sh
+docker-sh:
+    $(DOCKER) run -v $(shell pwd):/app -ti $(docker_image) /bin/ash
+
 ## help: display this help text
 .PHONY: help
 help:

View File

@@ -47,17 +47,17 @@ Installing
 `cheat` has no dependencies. To install it, download the executable from the
 [releases][] page and place it on your `PATH`.

+Alternatively, if you have [go][] installed, you may install `cheat` using `go
+get`:
+
+```sh
+go get -u github.com/cheat/cheat/cmd/cheat
+```
+
 Configuring
 -----------

 ### conf.yml ###
-`cheat` is configured by a YAML file that will be auto-generated on first run.
-
-Should you need to create a config file manually, you can do
-so via:
-
-```sh
-mkdir -p ~/.config/cheat && cheat --init > ~/.config/cheat/conf.yml
-```

 By default, the config file is assumed to exist on an XDG-compliant
 configuration path like `~/.config/cheat/conf.yml`. If you would like to store
@@ -98,7 +98,7 @@ install the community-sourced cheatsheets the first time you run `cheat`.
 Cheatpaths
 ----------

 Cheatsheets are stored on "cheatpaths", which are directories that contain
-cheetsheets. Cheatpaths are specified in the `conf.yml` file.
+cheatsheets. Cheatpaths are specified in the `conf.yml` file.

 It can be useful to configure `cheat` against multiple cheatpaths. A common
 pattern is to store cheatsheets from multiple repositories on individual
@@ -211,3 +211,4 @@ Additionally, `cheat` supports enhanced autocompletion via integration with
 [cheatsheets]: https://github.com/cheat/cheatsheets
 [completions]: https://github.com/cheat/cheat/tree/master/scripts
 [fzf]: https://github.com/junegunn/fzf
+[go]: https://golang.org

View File

@@ -1,18 +1,20 @@
 package main

 import (
+    "bytes"
     "fmt"
-    "os"
     "text/tabwriter"

     "github.com/cheat/cheat/internal/config"
+    "github.com/cheat/cheat/internal/display"
 )

 // cmdDirectories lists the configured cheatpaths.
 func cmdDirectories(opts map[string]interface{}, conf config.Config) {

     // initialize a tabwriter to produce cleanly columnized output
-    w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
+    var out bytes.Buffer
+    w := tabwriter.NewWriter(&out, 0, 0, 1, ' ', 0)

     // generate sorted, columnized output
     for _, path := range conf.Cheatpaths {
@@ -25,4 +27,5 @@ func cmdDirectories(opts map[string]interface{}, conf config.Config) {

     // write columnized output to stdout
     w.Flush()
+    display.Write(out.String(), conf)
 }

View File

@@ -1,6 +1,7 @@
 package main

 import (
+    "bytes"
     "fmt"
     "os"
     "regexp"
@@ -9,6 +10,7 @@ import (
     "text/tabwriter"

     "github.com/cheat/cheat/internal/config"
+    "github.com/cheat/cheat/internal/display"
     "github.com/cheat/cheat/internal/sheet"
     "github.com/cheat/cheat/internal/sheets"
 )
@@ -35,8 +37,8 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
     // sheets with local sheets), here we simply want to create a slice
     // containing all sheets.
     flattened := []sheet.Sheet{}
-    for _, pathSheets := range cheatsheets {
-        for _, s := range pathSheets {
+    for _, pathsheets := range cheatsheets {
+        for _, s := range pathsheets {
             flattened = append(flattened, s)
         }
     }
@@ -79,16 +81,19 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
         flattened = filtered
     }

-    // exit early if no cheatsheets are available
+    // return exit code 2 if no cheatsheets are available
     if len(flattened) == 0 {
-        os.Exit(0)
+        os.Exit(2)
     }

     // initialize a tabwriter to produce cleanly columnized output
-    w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
+    var out bytes.Buffer
+    w := tabwriter.NewWriter(&out, 0, 0, 1, ' ', 0)
+
+    // write a header row
+    fmt.Fprintln(w, "title:\tfile:\ttags:")

     // generate sorted, columnized output
-    fmt.Fprintln(w, "title:\tfile:\ttags:")
     for _, sheet := range flattened {
         fmt.Fprintln(w, fmt.Sprintf(
             "%s\t%s\t%s",
@@ -100,4 +105,5 @@ func cmdList(opts map[string]interface{}, conf config.Config) {

     // write columnized output to stdout
     w.Flush()
+    display.Write(out.String(), conf)
 }

View File

@@ -37,8 +37,8 @@ func cmdRemove(opts map[string]interface{}, conf config.Config) {
     // fail early if the requested cheatsheet does not exist
     sheet, ok := consolidated[cheatsheet]
     if !ok {
-        fmt.Fprintln(os.Stderr, fmt.Sprintf("no cheatsheet found for '%s'.\n", cheatsheet))
-        os.Exit(1)
+        fmt.Fprintln(os.Stderr, fmt.Sprintf("No cheatsheet found for '%s'.\n", cheatsheet))
+        os.Exit(2)
     }

     // fail early if the sheet is read-only

View File

@@ -7,7 +7,7 @@ import (
     "strings"

     "github.com/cheat/cheat/internal/config"
-    "github.com/cheat/cheat/internal/sheet"
+    "github.com/cheat/cheat/internal/display"
     "github.com/cheat/cheat/internal/sheets"
 )

@@ -31,30 +31,18 @@ func cmdSearch(opts map[string]interface{}, conf config.Config) {
         )
     }

-    // consolidate the cheatsheets found on all paths into a single map of
-    // `title` => `sheet` (ie, allow more local cheatsheets to override less
-    // local cheatsheets)
-    consolidated := sheets.Consolidate(cheatsheets)
-
-    // if <cheatsheet> was provided, search that single sheet only
-    if opts["<cheatsheet>"] != nil {
-        cheatsheet := opts["<cheatsheet>"].(string)
-
-        // assert that the cheatsheet exists
-        s, ok := consolidated[cheatsheet]
-        if !ok {
-            fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
-            os.Exit(0)
-        }
-
-        consolidated = map[string]sheet.Sheet{
-            cheatsheet: s,
-        }
-    }
+    // iterate over each cheatpath
+    out := ""
+    for _, pathcheats := range cheatsheets {

         // sort the cheatsheets alphabetically, and search for matches
-        for _, sheet := range sheets.Sort(consolidated) {
+        for _, sheet := range sheets.Sort(pathcheats) {
+
+            // if <cheatsheet> was provided, constrain the search only to
+            // matching cheatsheets
+            if opts["<cheatsheet>"] != nil && sheet.Title != opts["<cheatsheet>"] {
+                continue
+            }

             // assume that we want to perform a case-insensitive search for <phrase>
             pattern := "(?i)" + phrase
@@ -86,13 +74,22 @@ func cmdSearch(opts map[string]interface{}, conf config.Config) {
                 sheet.Colorize(conf)
             }

-            // output the cheatsheet title
-            fmt.Printf("%s:\n", sheet.Title)
+            // display the cheatsheet title and path
+            out += fmt.Sprintf("%s %s\n",
+                display.Underline(sheet.Title),
+                display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
+            )

-            // indent each line of content with two spaces
-            for _, line := range strings.Split(sheet.Text, "\n") {
-                fmt.Printf("  %s\n", line)
-            }
-
-            fmt.Println("")
+            // indent each line of content
+            out += display.Indent(sheet.Text) + "\n"
         }
+    }
+
+    // trim superfluous newlines
+    out = strings.TrimSpace(out)
+
+    // display the output
+    // NB: resist the temptation to call `display.Display` multiple times in
+    // the loop above. That will not play nicely with the paginator.
+    display.Write(out, conf)
 }

View File

@@ -5,6 +5,7 @@ import (
     "os"

     "github.com/cheat/cheat/internal/config"
+    "github.com/cheat/cheat/internal/display"
     "github.com/cheat/cheat/internal/sheets"
 )

@@ -18,8 +19,12 @@ func cmdTags(opts map[string]interface{}, conf config.Config) {
         os.Exit(1)
     }

-    // write sheet tags to stdout
+    // assemble the output
+    out := ""
     for _, tag := range sheets.Tags(cheatsheets) {
-        fmt.Println(tag)
+        out += fmt.Sprintln(tag)
     }
+
+    // display the output
+    display.Write(out, conf)
 }

View File

@@ -6,6 +6,7 @@ import (
     "strings"

     "github.com/cheat/cheat/internal/config"
+    "github.com/cheat/cheat/internal/display"
     "github.com/cheat/cheat/internal/sheets"
 )

@@ -29,16 +30,46 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
         )
     }

-    // consolidate the cheatsheets found on all paths into a single map of
-    // `title` => `sheet` (ie, allow more local cheatsheets to override less
-    // local cheatsheets)
+    // if --all was passed, display cheatsheets from all cheatpaths
+    if opts["--all"].(bool) {
+
+        // iterate over the cheatpaths
+        out := ""
+        for _, cheatpath := range cheatsheets {
+
+            // if the cheatpath contains the specified cheatsheet, display it
+            if sheet, ok := cheatpath[cheatsheet]; ok {
+
+                // identify the matching cheatsheet
+                out += fmt.Sprintf("%s %s\n",
+                    display.Underline(sheet.Title),
+                    display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
+                )
+
+                // apply colorization if requested
+                if conf.Color(opts) {
+                    sheet.Colorize(conf)
+                }
+
+                // display the cheatsheet
+                out += display.Indent(sheet.Text) + "\n"
+            }
+        }
+
+        // display and exit
+        display.Write(strings.TrimSuffix(out, "\n"), conf)
+        os.Exit(0)
+    }
+
+    // otherwise, consolidate the cheatsheets found on all paths into a single
+    // map of `title` => `sheet` (ie, allow more local cheatsheets to override
+    // less local cheatsheets)
     consolidated := sheets.Consolidate(cheatsheets)

     // fail early if the requested cheatsheet does not exist
     sheet, ok := consolidated[cheatsheet]
     if !ok {
         fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
-        os.Exit(0)
+        os.Exit(2)
     }

     // apply colorization if requested
@@ -47,5 +78,5 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
     }

     // display the cheatsheet
-    fmt.Print(sheet.Text)
+    display.Write(sheet.Text, conf)
 }

View File

@@ -3,11 +3,12 @@ Usage:

 Options:
     --init                  Write a default config file to stdout
+    -a --all                Search among all cheatpaths
     -c --colorize           Colorize output
     -d --directories        List cheatsheet directories
     -e --edit=<cheatsheet>  Edit <cheatsheet>
     -l --list               List cheatsheets
-    -p --path=<name>        Return only sheets found on path <name>
+    -p --path=<name>        Return only sheets found on cheatpath <name>
     -r --regex              Treat search <phrase> as a regex
     -s --search=<phrase>    Search cheatsheets for <phrase>
     -t --tag=<tag>          Return only sheets matching <tag>

View File

@@ -5,7 +5,6 @@ package main
 import (
     "fmt"
     "os"
-    "path"
     "runtime"
     "strings"

@@ -17,7 +16,7 @@ import (
     "github.com/cheat/cheat/internal/installer"
 )

-const version = "3.9.0"
+const version = "4.2.0"

 func main() {

@@ -74,62 +73,16 @@ func main() {
             os.Exit(0)
         }

-        // read the config template
-        configs := configs()
-
-        // determine the appropriate paths for config data and (optional) community
-        // cheatsheets based on the user's platform
+        // choose a confpath
         confpath = confpaths[0]
-        confdir := path.Dir(confpath)

-        // create paths for community and personal cheatsheets
-        community := path.Join(confdir, "/cheatsheets/community")
-        personal := path.Join(confdir, "/cheatsheets/personal")
-
-        // template the above paths into the default configs
-        configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
-        configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
-
-        // prompt the user to download the community cheatsheets
-        yes, err = installer.Prompt(
-            "Would you like to download the community cheatsheets? [Y/n]",
-            true,
-        )
-        if err != nil {
-            fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
-            os.Exit(1)
-        }
-
-        // clone the community cheatsheets if so instructed
-        if yes {
-            // clone the community cheatsheets
-            if err := installer.Clone(community); err != nil {
-                fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
-                os.Exit(1)
-            }
-
-            // also create a directory for personal cheatsheets
-            if err := os.MkdirAll(personal, os.ModePerm); err != nil {
-                fmt.Fprintf(
-                    os.Stderr,
-                    "failed to create config: failed to create directory: %s: %v\n",
-                    personal,
-                    err)
-                os.Exit(1)
-            }
-        }
-
-        // the config file does not exist, so we'll try to create one
-        if err = config.Init(confpath, configs); err != nil {
-            fmt.Fprintf(
-                os.Stderr,
-                "failed to create config file: %s: %v\n",
-                confpath,
-                err,
-            )
+        // run the installer
+        if err := installer.Run(configs(), confpath); err != nil {
+            fmt.Fprintf(os.Stderr, "failed to run installer: %v\n", err)
             os.Exit(1)
         }

+        // notify the user and exit
         fmt.Printf("Created config file: %s\n", confpath)
         fmt.Println("Please read this file for advanced configuration information.")
         os.Exit(0)
@@ -185,6 +138,9 @@ func main() {
     case opts["<cheatsheet>"] != nil:
         cmd = cmdView

+    case opts["--tag"] != nil && opts["--tag"].(string) != "":
+        cmd = cmdList
+
     default:
         fmt.Println(usage())
         os.Exit(0)

View File

@@ -23,6 +23,9 @@ style: monokai
 # One of: "terminal", "terminal256", "terminal16m"
 formatter: terminal16m

+# Through which pager should output be piped? (Unset this key for no pager.)
+pager: less -FRX
+
 # The paths at which cheatsheets are available. Tags associated with a cheatpath
 # are automatically attached to all cheatsheets residing on that path.
 #

View File

@@ -12,11 +12,12 @@ func usage() string {

 Options:
     --init                  Write a default config file to stdout
+    -a --all                Search among all cheatpaths
    -c --colorize            Colorize output
    -d --directories         List cheatsheet directories
    -e --edit=<cheatsheet>   Edit <cheatsheet>
    -l --list                List cheatsheets
-   -p --path=<name>         Return only sheets found on path <name>
+   -p --path=<name>         Return only sheets found on cheatpath <name>
    -r --regex               Treat search <phrase> as a regex
    -s --search=<phrase>     Search cheatsheets for <phrase>
    -t --tag=<tag>           Return only sheets matching <tag>

View File

@@ -14,6 +14,9 @@ style: monokai
 # One of: "terminal", "terminal256", "terminal16m"
 formatter: terminal16m

+# Through which pager should output be piped? (Unset this key for no pager.)
+pager: less -FRX
+
 # The paths at which cheatsheets are available. Tags associated with a cheatpath
 # are automatically attached to all cheatsheets residing on that path.
 #

View File

@ -1,4 +1,4 @@
.\" Automatically generated by Pandoc 1.17.2 .\" Automatically generated by Pandoc 2.2.1
.\" .\"
.TH "CHEAT" "1" "" "" "General Commands Manual" .TH "CHEAT" "1" "" "" "General Commands Manual"
.hy .hy
@ -17,62 +17,62 @@ commands that they use frequently, but not frequently enough to
remember. remember.
.SH OPTIONS .SH OPTIONS
.TP .TP
.B \-\-init .B \[en]init
Print a config file to stdout. Print a config file to stdout.
.RS .RS
.RE .RE
.TP .TP
.B \-c, \-\-colorize .B \-c, \[en]colorize
Colorize output. Colorize output.
.RS .RS
.RE .RE
.TP .TP
.B \-d, \-\-directories .B \-d, \[en]directories
List cheatsheet directories. List cheatsheet directories.
.RS .RS
.RE .RE
.TP .TP
.B \-e, \-\-edit=\f[I]CHEATSHEET\f[] .B \-e, \[en]edit=\f[I]CHEATSHEET\f[]
Open \f[I]CHEATSHEET\f[] for editing. Open \f[I]CHEATSHEET\f[] for editing.
.RS .RS
.RE .RE
.TP .TP
.B \-l, \-\-list .B \-l, \[en]list
List available cheatsheets. List available cheatsheets.
.RS .RS
.RE .RE
.TP .TP
.B \-p, \-\-path=\f[I]PATH\f[] .B \-p, \[en]path=\f[I]PATH\f[]
Filter only to sheets found on path \f[I]PATH\f[]. Filter only to sheets found on path \f[I]PATH\f[].
.RS .RS
.RE .RE
.TP .TP
.B \-r, \-\-regex .B \-r, \[en]regex
Treat search \f[I]PHRASE\f[] as a regular expression. Treat search \f[I]PHRASE\f[] as a regular expression.
.RS .RS
.RE .RE
.TP .TP
.B \-s, \-\-search=\f[I]PHRASE\f[] .B \-s, \[en]search=\f[I]PHRASE\f[]
Search cheatsheets for \f[I]PHRASE\f[]. Search cheatsheets for \f[I]PHRASE\f[].
.RS .RS
.RE .RE
.TP .TP
.B \-t, \-\-tag=\f[I]TAG\f[] .B \-t, \[en]tag=\f[I]TAG\f[]
Filter only to sheets tagged with \f[I]TAG\f[]. Filter only to sheets tagged with \f[I]TAG\f[].
.RS .RS
.RE .RE
.TP .TP
.B \-T, \-\-tags .B \-T, \[en]tags
List all tags in use. List all tags in use.
.RS .RS
.RE .RE
.TP .TP
.B \-v, \-\-version .B \-v, \[en]version
Print the version number. Print the version number.
.RS .RS
.RE .RE
.TP .TP
.B \-\-rm=\f[I]CHEATSHEET\f[] .B \[en]rm=\f[I]CHEATSHEET\f[]
Remove (deletes) \f[I]CHEATSHEET\f[]. Remove (deletes) \f[I]CHEATSHEET\f[].
.RS .RS
.RE .RE
@ -88,7 +88,7 @@ cheat \-e \f[I]foo\f[]
.RS .RS
.RE .RE
.TP .TP
.B To edit (or create) the foo/bar cheatsheet on the \[aq]work\[aq] cheatpath: .B To edit (or create) the foo/bar cheatsheet on the `work' cheatpath:
cheat \-p \f[I]work\f[] \-e \f[I]foo/bar\f[] cheat \-p \f[I]work\f[] \-e \f[I]foo/bar\f[]
.RS .RS
.RE .RE
@ -103,7 +103,7 @@ cheat \-l
.RS .RS
.RE .RE
.TP .TP
.B To list all cheatsheets whose titles match \[aq]apt\[aq]: .B To list all cheatsheets whose titles match `apt':
cheat \-l \f[I]apt\f[] cheat \-l \f[I]apt\f[]
.RS .RS
.RE .RE
@ -113,23 +113,23 @@ cheat \-T
.RS .RS
.RE .RE
.TP .TP
.B To list available cheatsheets that are tagged as \[aq]personal\[aq]: .B To list available cheatsheets that are tagged as `personal':
cheat \-l \-t \f[I]personal\f[] cheat \-l \-t \f[I]personal\f[]
.RS .RS
.RE .RE
.TP .TP
.B To search for \[aq]ssh\[aq] among all cheatsheets, and colorize matches: .B To search for `ssh' among all cheatsheets, and colorize matches:
cheat \-c \-s \f[I]ssh\f[] cheat \-c \-s \f[I]ssh\f[]
.RS .RS
.RE .RE
.TP .TP
.B To search (by regex) for cheatsheets that contain an IP address: .B To search (by regex) for cheatsheets that contain an IP address:
cheat \-c \-r \-s \f[I]\[aq](?:[0\-9]{1,3}.){3}[0\-9]{1,3}\[aq]\f[] cheat \-c \-r \-s \f[I]`(?:[0\-9]{1,3}.){3}[0\-9]{1,3}'\f[]
.RS .RS
.RE .RE
.TP .TP
.B To remove (delete) the foo/bar cheatsheet: .B To remove (delete) the foo/bar cheatsheet:
cheat \-\-rm \f[I]foo/bar\f[] cheat \[en]rm \f[I]foo/bar\f[]
.RS .RS
.RE .RE
.SH FILES .SH FILES
@ -159,15 +159,15 @@ depending upon your platform:
\f[B]cheat\f[] will search in the order specified above. \f[B]cheat\f[] will search in the order specified above.
The first \f[I]conf.yaml\f[] encountered will be respected. The first \f[I]conf.yaml\f[] encountered will be respected.
.PP .PP
If \f[B]cheat\f[] cannot locate a config file, it will ask if you\[aq]d If \f[B]cheat\f[] cannot locate a config file, it will ask if you'd like
like to generate one automatically. to generate one automatically.
Alternatively, you may also generate a config file manually by running Alternatively, you may also generate a config file manually by running
\f[B]cheat \-\-init\f[] and saving its output to the appropriate \f[B]cheat \[en]init\f[] and saving its output to the appropriate
location for your platform. location for your platform.
.SS Cheatpaths .SS Cheatpaths
.PP .PP
\f[B]cheat\f[] reads its cheatsheets from "cheatpaths", which are the \f[B]cheat\f[] reads its cheatsheets from \[lq]cheatpaths\[rq], which
directories in which cheatsheets are stored. are the directories in which cheatsheets are stored.
Cheatpaths may be configured in \f[I]conf.yaml\f[], and viewed via Cheatpaths may be configured in \f[I]conf.yaml\f[], and viewed via
\f[B]cheat \-d\f[]. \f[B]cheat \-d\f[].
.PP .PP
@ -203,6 +203,13 @@ If set, autocompletion scripts will attempt to integrate with
\f[B]fzf\f[]. \f[B]fzf\f[].
.RS .RS
.RE .RE
.SH RETURN VALUES
.IP "0." 3
Successful termination
.IP "1." 3
Application error
.IP "2." 3
Cheatsheet(s) not found
.SH BUGS .SH BUGS
.PP .PP
See GitHub issues: <https://github.com/cheat/cheat/issues> See GitHub issues: <https://github.com/cheat/cheat/issues>

View File

@@ -163,6 +163,15 @@ set, all other config paths will be ignored.

 : If set, autocompletion scripts will attempt to integrate with **fzf**.

+RETURN VALUES
+=============
+
+0. Successful termination
+1. Application error
+2. Cheatsheet(s) not found
+
 BUGS
 ====

7
go.mod
View File

@@ -3,16 +3,17 @@ module github.com/cheat/cheat
 go 1.14

 require (
-    github.com/alecthomas/chroma v0.7.1
+    github.com/alecthomas/chroma v0.8.2
     github.com/davecgh/go-spew v1.1.1
+    github.com/dlclark/regexp2 v1.4.0 // indirect
     github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815
     github.com/kr/text v0.2.0 // indirect
     github.com/mattn/go-isatty v0.0.12
     github.com/mitchellh/go-homedir v1.1.0
     github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
     github.com/sergi/go-diff v1.1.0 // indirect
-    golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 // indirect
+    golang.org/x/sys v0.0.0-20201126233918-771906719818 // indirect
     gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
     gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0
-    gopkg.in/yaml.v2 v2.2.8
+    gopkg.in/yaml.v2 v2.4.0
 )

32
go.sum
View File

@ -1,10 +1,10 @@
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
github.com/alecthomas/chroma v0.7.1 h1:G1i02OhUbRi2nJxcNkwJaY/J1gHXj9tt72qN6ZouLFQ= github.com/alecthomas/chroma v0.8.2 h1:x3zkuE2lUk/RIekyAJ3XRqSCP4zwWDfcw/YJCuCAACg=
github.com/alecthomas/chroma v0.7.1/go.mod h1:gHw09mkX1Qp80JlYbmN9L3+4R5o6DJJ3GRShh+AICNc= github.com/alecthomas/chroma v0.8.2/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI= github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY= github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ= github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
@ -13,8 +13,10 @@ github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg= github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
@ -24,19 +26,16 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
@ -49,12 +48,13 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg= golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201126233918-771906719818 h1:f1CIuDlJhwANEC2MM87MBEVMr3jl5bifgsfj90XAF9c=
golang.org/x/sys v0.0.0-20201126233918-771906719818/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@ -64,5 +64,5 @@ gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0 h1:POO/ycCATvegFmVuPpQzZFJ+p
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg= gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=

View File

@@ -0,0 +1,22 @@
package config
import (
"testing"
)
// TestColor asserts that colorization rules are properly respected
func TestColor(t *testing.T) {
// mock a config
conf := Config{}
opts := map[string]interface{}{"--colorize": false}
if conf.Color(opts) {
t.Errorf("failed to respect --colorize (false)")
}
opts = map[string]interface{}{"--colorize": true}
if !conf.Color(opts) {
t.Errorf("failed to respect --colorize (true)")
}
}

View File

@@ -5,6 +5,7 @@ import (
     "io/ioutil"
     "os"
     "path/filepath"
+    "strings"

     cp "github.com/cheat/cheat/internal/cheatpath"

@@ -19,6 +20,7 @@ type Config struct {
     Cheatpaths []cp.Cheatpath `yaml:"cheatpaths"`
     Style      string         `yaml:"style"`
     Formatter  string         `yaml:"formatter"`
+    Pager      string         `yaml:"pager"`
 }

 // New returns a new Config struct
@@ -111,5 +113,10 @@ func New(opts map[string]interface{}, confPath string, resolve bool) (Config, er
         conf.Formatter = "terminal16m"
     }

+    // if a pager was not provided, set a default
+    if strings.TrimSpace(conf.Pager) == "" {
+        conf.Pager = ""
+    }
+
     return conf, nil
 }

View File

@@ -0,0 +1,38 @@
package config
import (
"io/ioutil"
"os"
"testing"
)
// TestInit asserts that configs are properly initialized
func TestInit(t *testing.T) {
// initialize a temporary config file
confFile, err := ioutil.TempFile("", "cheat-test")
if err != nil {
t.Errorf("failed to create temp file: %v", err)
}
// clean up the temp file
defer os.Remove(confFile.Name())
// initialize the config file
conf := "mock config data"
if err = Init(confFile.Name(), conf); err != nil {
t.Errorf("failed to init config file: %v", err)
}
// read back the config file contents
bytes, err := ioutil.ReadFile(confFile.Name())
if err != nil {
t.Errorf("failed to read config file: %v", err)
}
// assert that the contents were written correctly
got := string(bytes)
if got != conf {
t.Errorf("failed to write configs: want: %s, got: %s", conf, got)
}
}

View File

@@ -0,0 +1,53 @@
package config
import (
"io/ioutil"
"os"
"testing"
)
// TestPathConfigNotExists asserts that `Path` identifies non-existent config
// files
func TestPathConfigNotExists(t *testing.T) {
// package (invalid) cheatpaths
paths := []string{"/cheat-test-conf-does-not-exist"}
// assert
if _, err := Path(paths); err == nil {
t.Errorf("failed to identify non-existent config file")
}
}
// TestPathConfigExists asserts that `Path` identifies existent config files
func TestPathConfigExists(t *testing.T) {
// initialize a temporary config file
confFile, err := ioutil.TempFile("", "cheat-test")
if err != nil {
t.Errorf("failed to create temp file: %v", err)
}
// clean up the temp file
defer os.Remove(confFile.Name())
// package cheatpaths
paths := []string{
"/cheat-test-conf-does-not-exist",
confFile.Name(),
}
// assert
got, err := Path(paths)
if err != nil {
t.Errorf("failed to identify config file: %v", err)
}
if got != confFile.Name() {
t.Errorf(
"failed to return config path: want: %s, got: %s",
confFile.Name(),
got,
)
}
}

View File

@@ -36,10 +36,10 @@ func Paths(
         paths = append(paths, path.Join(xdgpath, "/cheat/conf.yml"))
     }

-    // if `XDG_CONFIG_HOME` is not set, search the user's home directory
     paths = append(paths, []string{
         path.Join(home, ".config/cheat/conf.yml"),
         path.Join(home, ".cheat/conf.yml"),
+        "/etc/cheat/conf.yml",
     }...)

     return paths, nil

View File

@@ -39,6 +39,7 @@ func TestValidatePathsNix(t *testing.T) {
         "/home/bar/cheat/conf.yml",
         "/home/foo/.config/cheat/conf.yml",
         "/home/foo/.cheat/conf.yml",
+        "/etc/cheat/conf.yml",
     }

     // assert that output matches expectations
@@ -81,6 +82,7 @@ func TestValidatePathsNixNoXDG(t *testing.T) {
     want := []string{
         "/home/foo/.config/cheat/conf.yml",
         "/home/foo/.cheat/conf.yml",
+        "/etc/cheat/conf.yml",
     }

     // assert that output matches expectations

18
internal/display/faint.go Normal file
View File

@@ -0,0 +1,18 @@
package display
import (
"fmt"
"github.com/cheat/cheat/internal/config"
)
// Faint returns an faint string
func Faint(str string, conf config.Config) string {
// make `str` faint only if colorization has been requested
if conf.Colorize {
return fmt.Sprintf(fmt.Sprintf("\033[2m%s\033[0m", str))
}
// otherwise, return the string unmodified
return str
}

View File

@@ -0,0 +1,27 @@
package display
import (
"testing"
"github.com/cheat/cheat/internal/config"
)
// TestFaint asserts that Faint applies faint formatting
func TestFaint(t *testing.T) {
// case: apply colorization
conf := config.Config{Colorize: true}
want := "\033[2mfoo\033[0m"
got := Faint("foo", conf)
if want != got {
t.Errorf("failed to faint: want: %s, got: %s", want, got)
}
// case: do not apply colorization
conf.Colorize = false
want = "foo"
got = Faint("foo", conf)
if want != got {
t.Errorf("failed to faint: want: %s, got: %s", want, got)
}
}

View File

@@ -0,0 +1,21 @@
package display
import (
"fmt"
"strings"
)
// Indent prepends each line of a string with a tab
func Indent(str string) string {
// trim superfluous whitespace
str = strings.TrimSpace(str)
// prepend each line with a tab character
out := ""
for _, line := range strings.Split(str, "\n") {
out += fmt.Sprintf("\t%s\n", line)
}
return out
}

View File

@@ -0,0 +1,12 @@
package display
import "testing"
// TestIndent asserts that Indent prepends a tab to each line
func TestIndent(t *testing.T) {
got := Indent("foo\nbar\nbaz")
want := "\tfoo\n\tbar\n\tbaz\n"
if got != want {
t.Errorf("failed to indent: want: %s, got: %s", want, got)
}
}

View File

@@ -0,0 +1,8 @@
package display
import "fmt"
// Underline returns an underlined string
func Underline(str string) string {
return fmt.Sprintf(fmt.Sprintf("\033[4m%s\033[0m", str))
}

View File

@@ -0,0 +1,14 @@
package display
import (
"testing"
)
// TestUnderline asserts that Underline applies underline formatting
func TestUnderline(t *testing.T) {
want := "\033[4mfoo\033[0m"
got := Underline("foo")
if want != got {
t.Errorf("failed to underline: want: %s, got: %s", want, got)
}
}

37
internal/display/write.go Normal file
View File

@@ -0,0 +1,37 @@
package display
import (
"fmt"
"os"
"os/exec"
"strings"
"github.com/cheat/cheat/internal/config"
)
// Write writes output either directly to stdout, or through a pager,
// depending upon configuration.
func Write(out string, conf config.Config) {
// if no pager was configured, print the output to stdout and exit
if conf.Pager == "" {
fmt.Print(out)
os.Exit(0)
}
// otherwise, pipe output through the pager
parts := strings.Split(conf.Pager, " ")
pager := parts[0]
args := parts[1:]
// run the pager
cmd := exec.Command(pager, args...)
cmd.Stdin = strings.NewReader(out)
cmd.Stdout = os.Stdout
// handle errors
err := cmd.Run()
if err != nil {
fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to write to pager: %v", err))
os.Exit(1)
}
}

View File

@@ -8,8 +8,8 @@ import (

 const cloneURL = "https://github.com/cheat/cheatsheets.git"

-// Clone clones the community cheatsheets
-func Clone(path string) error {
+// clone clones the community cheatsheets
+func clone(path string) error {

     // perform the clone in a shell
     cmd := exec.Command("git", "clone", cloneURL, path)

55
internal/installer/run.go Normal file
View File

@@ -0,0 +1,55 @@
package installer
import (
"fmt"
"os"
"path"
"strings"
"github.com/cheat/cheat/internal/config"
)
// Run runs the installer
func Run(configs string, confpath string) error {
// determine the appropriate paths for config data and (optional) community
// cheatsheets based on the user's platform
confdir := path.Dir(confpath)
// create paths for community and personal cheatsheets
community := path.Join(confdir, "/cheatsheets/community")
personal := path.Join(confdir, "/cheatsheets/personal")
// template the above paths into the default configs
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
// prompt the user to download the community cheatsheets
yes, err := Prompt(
"Would you like to download the community cheatsheets? [Y/n]",
true,
)
if err != nil {
return fmt.Errorf("failed to prompt: %v", err)
}
// clone the community cheatsheets if so instructed
if yes {
// clone the community cheatsheets
if err := clone(community); err != nil {
return fmt.Errorf("failed to clone cheatsheets: %v", err)
}
// also create a directory for personal cheatsheets
if err := os.MkdirAll(personal, os.ModePerm); err != nil {
return fmt.Errorf("failed to create directory: %v", err)
}
}
// the config file does not exist, so we'll try to create one
if err = config.Init(confpath, configs); err != nil {
return fmt.Errorf("failed to create config file: %v", err)
}
return nil
}

View File

@@ -13,7 +13,7 @@ func Path(filename string) string {
     // determine the path of this file during runtime
     _, thisfile, _, _ := runtime.Caller(0)

-    // compute the config path
+    // compute the mock path
     file, err := filepath.Abs(
         path.Join(
             filepath.Dir(thisfile),
@@ -22,7 +22,7 @@ func Path(filename string) string {
         ),
     )
     if err != nil {
-        panic(fmt.Errorf("failed to resolve config path: %v", err))
+        panic(fmt.Errorf("failed to resolve mock path: %v", err))
     }

     return file

View File

@@ -0,0 +1,34 @@
package sheet
import (
"testing"
"github.com/cheat/cheat/internal/config"
)
// TestColorize asserts that syntax-highlighting is correctly applied
func TestColorize(t *testing.T) {
// mock configs
conf := config.Config{
Formatter: "terminal16m",
Style: "solarized-dark",
}
// mock a sheet
s := Sheet{
Text: "echo 'foo'",
}
// colorize the sheet text
s.Colorize(conf)
// initialize expectations
want := "echo"
want += " 'foo'"
// assert
if s.Text != want {
t.Errorf("failed to colorize sheet: want: %s, got: %s", want, s.Text)
}
}

View File

@@ -25,7 +25,7 @@ func TestCopyFlat(t *testing.T) {
     }

     // mock a cheatsheet struct
-    sheet, err := New("foo", src.Name(), []string{}, false)
+    sheet, err := New("foo", "community", src.Name(), []string{}, false)
     if err != nil {
         t.Errorf("failed to init cheatsheet: %v", err)
     }
@@ -72,7 +72,13 @@ func TestCopyDeep(t *testing.T) {
     }

     // mock a cheatsheet struct
-    sheet, err := New("/cheat-tests/alpha/bravo/foo", src.Name(), []string{}, false)
+    sheet, err := New(
+        "/cheat-tests/alpha/bravo/foo",
+        "community",
+        src.Name(),
+        []string{},
+        false,
+    )
     if err != nil {
         t.Errorf("failed to init cheatsheet: %v", err)
     }

View File

@ -11,6 +11,7 @@ import (
 // Sheet encapsulates sheet information
 type Sheet struct {
 	Title     string
+	CheatPath string
 	Path      string
 	Text      string
 	Tags      []string
@ -21,6 +22,7 @@ type Sheet struct {
 // New initializes a new Sheet
 func New(
 	title string,
+	cheatpath string,
 	path string,
 	tags []string,
 	readOnly bool,
@ -47,6 +49,7 @@ func New(
 	// initialize and return a sheet
 	return Sheet{
 		Title:     title,
+		CheatPath: cheatpath,
 		Path:      path,
 		Text:      text + "\n",
 		Tags:      tags,
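
An illustrative sketch of the updated constructor call shape: `New` now takes the cheatpath name as its second argument and records it on the returned `Sheet`. The temp-file setup below is an assumption made so the example is self-contained; it is not part of this changeset.

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"os"

	"github.com/cheat/cheat/internal/sheet"
)

func main() {
	// Write a throwaway cheatsheet so the constructor has a real file to read.
	tmp, err := ioutil.TempFile("", "cheat-example-*")
	if err != nil {
		log.Fatal(err)
	}
	defer os.Remove(tmp.Name())
	tmp.WriteString("---\ntags: [ example ]\n---\ntar -czvf archive.tgz ./dir\n")
	tmp.Close()

	// New(title, cheatpath, path, tags, readOnly)
	s, err := sheet.New("tar", "community", tmp.Name(), []string{}, true)
	if err != nil {
		log.Fatalf("failed to init cheatsheet: %v", err)
	}

	fmt.Println(s.Title, s.CheatPath) // => tar community
}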

View File

@ -13,6 +13,7 @@ func TestSheetSuccess(t *testing.T) {
 	// initialize a sheet
 	sheet, err := New(
 		"foo",
+		"community",
 		mock.Path("sheet/foo"),
 		[]string{"alpha", "bravo"},
 		false,
@ -61,6 +62,7 @@ func TestSheetFailure(t *testing.T) {
 	// initialize a sheet
 	_, err := New(
 		"foo",
+		"community",
 		mock.Path("/does-not-exist"),
 		[]string{"alpha", "bravo"},
 		false,
@ -69,3 +71,20 @@ func TestSheetFailure(t *testing.T) {
 		t.Errorf("failed to return an error on unreadable sheet")
 	}
 }
// TestSheetFrontMatterFailure asserts that an error is returned if the sheet's
// frontmatter cannot be parsed.
func TestSheetFrontMatterFailure(t *testing.T) {
// initialize a sheet
_, err := New(
"foo",
"community",
mock.Path("sheet/bad-fm"),
[]string{"alpha", "bravo"},
false,
)
if err == nil {
t.Errorf("failed to return an error on malformed front-matter")
}
}

View File

@ -59,7 +59,13 @@ func Load(cheatpaths []cp.Cheatpath) ([]map[string]sheet.Sheet, error) {
 		}
 		// parse the cheatsheet file into a `sheet` struct
-		s, err := sheet.New(title, path, cheatpath.Tags, cheatpath.ReadOnly)
+		s, err := sheet.New(
+			title,
+			cheatpath.Name,
+			path,
+			cheatpath.Tags,
+			cheatpath.ReadOnly,
+		)
 		if err != nil {
 			return fmt.Errorf(
 				"failed to load sheet: %s, path: %s, err: %v",

View File

@ -1,3 +1,62 @@
 package sheets
-// TODO
+import (
"path"
"testing"
"github.com/cheat/cheat/internal/cheatpath"
"github.com/cheat/cheat/internal/mock"
)
// TestLoad asserts that sheets on valid cheatpaths can be loaded successfully
func TestLoad(t *testing.T) {
// mock cheatpaths
cheatpaths := []cheatpath.Cheatpath{
{
Name: "community",
Path: path.Join(mock.Path("cheatsheets"), "community"),
ReadOnly: true,
},
{
Name: "personal",
Path: path.Join(mock.Path("cheatsheets"), "personal"),
ReadOnly: false,
},
}
// load cheatsheets
sheets, err := Load(cheatpaths)
if err != nil {
t.Errorf("failed to load cheatsheets: %v", err)
}
// assert that the correct number of sheets loaded
// (sheet load details are tested in `sheet_test.go`)
want := 4
if len(sheets) != want {
t.Errorf(
"failed to load correct number of cheatsheets: want: %d, got: %d",
want,
len(sheets),
)
}
}
// TestLoadBadPath asserts that an error is returned if a cheatpath is invalid
func TestLoadBadPath(t *testing.T) {
// mock a bad cheatpath
cheatpaths := []cheatpath.Cheatpath{
{
Name: "badpath",
Path: "/cheat/test/path/does/not/exist",
ReadOnly: true,
},
}
// attempt to load the cheatpath
if _, err := Load(cheatpaths); err == nil {
t.Errorf("failed to reject invalid cheatpath")
}
}

View File

@ -9,7 +9,7 @@ import (
"github.com/cheat/cheat/internal/sheet" "github.com/cheat/cheat/internal/sheet"
) )
// TestTags asserts that cheetsheet tags are properly returned // TestTags asserts that cheatsheet tags are properly returned
func TestTags(t *testing.T) { func TestTags(t *testing.T) {
// mock cheatsheets available on multiple cheatpaths // mock cheatsheets available on multiple cheatpaths

View File

View File

@ -0,0 +1,4 @@
---
tags: [ community ]
---
This is the bar cheatsheet.

View File

@ -0,0 +1,4 @@
---
tags: [ community ]
---
This is the foo cheatsheet.

View File

@ -0,0 +1,4 @@
---
tags: [ personal ]
---
This is the bat cheatsheet.

View File

@ -0,0 +1,4 @@
---
tags: [ personal ]
---
This is the baz cheatsheet.

mocks/sheet/bad-fm (new file, 4 lines)
View File

@ -0,0 +1,4 @@
---
syntax: sh
This is malformed frontmatter.

View File

@ -2,12 +2,18 @@
local cheats taglist pathlist local cheats taglist pathlist
_cheat_complete_cheatsheets() _cheat_complete_personal_cheatsheets()
{ {
cheats=("${(f)$(cheat -l -t personal | tail -n +2 | cut -d' ' -f1)}") cheats=("${(f)$(cheat -l -t personal | tail -n +2 | cut -d' ' -f1)}")
_describe -t cheats 'cheats' cheats _describe -t cheats 'cheats' cheats
} }
_cheat_complete_full_cheatsheets()
{
cheats=("${(f)$(cheat -l | tail -n +2 | cut -d' ' -f1)}")
_describe -t cheats 'cheats' cheats
}
_cheat_complete_tags() _cheat_complete_tags()
{ {
taglist=("${(f)$(cheat -T)}") taglist=("${(f)$(cheat -T)}")
@ -26,7 +32,7 @@ _cheat() {
'(--init)--init[Write a default config file to stdout]: :->none' \ '(--init)--init[Write a default config file to stdout]: :->none' \
'(-c --colorize)'{-c,--colorize}'[Colorize output]: :->none' \ '(-c --colorize)'{-c,--colorize}'[Colorize output]: :->none' \
'(-d --directories)'{-d,--directories}'[List cheatsheet directories]: :->none' \ '(-d --directories)'{-d,--directories}'[List cheatsheet directories]: :->none' \
'(-e --edit)'{-e,--edit}'[Edit <sheet>]: :->full' \ '(-e --edit)'{-e,--edit}'[Edit <sheet>]: :->personal' \
'(-l --list)'{-l,--list}'[List cheatsheets]: :->full' \ '(-l --list)'{-l,--list}'[List cheatsheets]: :->full' \
'(-p --path)'{-p,--path}'[Return only sheets found on path <name>]: :->pathlist' \ '(-p --path)'{-p,--path}'[Return only sheets found on path <name>]: :->pathlist' \
'(-r --regex)'{-r,--regex}'[Treat search <phrase> as a regex]: :->none' \ '(-r --regex)'{-r,--regex}'[Treat search <phrase> as a regex]: :->none' \
@ -34,13 +40,17 @@ _cheat() {
'(-t --tag)'{-t,--tag}'[Return only sheets matching <tag>]: :->taglist' \ '(-t --tag)'{-t,--tag}'[Return only sheets matching <tag>]: :->taglist' \
'(-T --tags)'{-T,--tags}'[List all tags in use]: :->none' \ '(-T --tags)'{-T,--tags}'[List all tags in use]: :->none' \
'(-v --version)'{-v,--version}'[Print the version number]: :->none' \ '(-v --version)'{-v,--version}'[Print the version number]: :->none' \
'(--rm)--rm[Remove (delete) <sheet>]: :->full' \ '(--rm)--rm[Remove (delete) <sheet>]: :->personal' \
'(-)*: :->full'
case $state in case $state in
(none) (none)
;; ;;
(full) (full)
_cheat_complete_cheatsheets _cheat_complete_full_cheatsheets
;;
(personal)
_cheat_complete_personal_cheatsheets
;; ;;
(taglist) (taglist)
_cheat_complete_tags _cheat_complete_tags
@ -49,7 +59,6 @@ _cheat() {
_cheat_complete_paths _cheat_complete_paths
;; ;;
(*) (*)
_cheat_complete_cheatsheets
;; ;;
esac esac
} }

View File

@ -20,6 +20,11 @@ linters:
     - wsl
     - gomnd
     - gocognit
+    - goerr113
+    - nolintlint
+    - testpackage
+    - godot
+    - nestif
 linters-settings:
   govet:

View File

@ -3,8 +3,11 @@ release:
   github:
     owner: alecthomas
     name: chroma
-brew:
+brews:
+  -
     install: bin.install "chroma"
+env:
+  - CGO_ENABLED=0
 builds:
 - goos:
   - linux
@ -18,7 +21,8 @@ builds:
   main: ./cmd/chroma/main.go
   ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
   binary: chroma
-archive:
+archives:
+  -
   format: tar.gz
   name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'

View File

@ -1,12 +0,0 @@
sudo: false
language: go
go:
- "1.13.x"
script:
- go test -v ./...
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2
- ./bin/golangci-lint run
- git clean -fdx .
after_success:
curl -sL https://git.io/goreleaser | bash && goreleaser

View File

@ -1,4 +1,4 @@
# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![Build Status](https://travis-ci.org/alecthomas/chroma.svg)](https://travis-ci.org/alecthomas/chroma) [![Gitter chat](https://badges.gitter.im/alecthomas.svg)](https://gitter.im/alecthomas/Lobby) # Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/chroma.svg)](https://circleci.com/gh/alecthomas/chroma) [![Go Report Card](https://goreportcard.com/badge/github.com/alecthomas/chroma)](https://goreportcard.com/report/github.com/alecthomas/chroma) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://gophers.slack.com/messages/CN9DS8YF3)
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly. > **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
@ -36,29 +36,30 @@ translators for Pygments lexers and styles.
Prefix | Language Prefix | Language
:----: | -------- :----: | --------
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
B | Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, BNF, Brainfuck B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, BlitzBasic, BNF, Brainfuck
C | C, C#, C++, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
D | D, Dart, Diff, Django/Jinja, Docker, DTD D | D, Dart, Diff, Django/Jinja, Docker, DTD
E | EBNF, Elixir, Elm, EmacsLisp, Erlang E | EBNF, Elixir, Elm, EmacsLisp, Erlang
F | Factor, Fish, Forth, Fortran, FSharp F | Factor, Fish, Forth, Fortran, FSharp
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HTML, HTTP, Hy H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy
I | Idris, INI, Io I | Idris, Igor, INI, Io
J | J, Java, JavaScript, JSON, Julia, Jungle J | J, Java, JavaScript, JSON, Julia, Jungle
K | Kotlin K | Kotlin
L | Lighttpd configuration file, LLVM, Lua L | Lighttpd configuration file, LLVM, Lua
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
N | NASM, Newspeak, Nginx configuration file, Nim, Nix N | NASM, Newspeak, Nginx configuration file, Nim, Nix
O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
P | PacmanConf, Perl, PHP, Pig, PkgConfig, PL/pgSQL, plaintext, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3 P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python, Python 3
Q | QBasic Q | QBasic
R | R, Racket, Ragel, react, reg, reStructuredText, Rexx, Ruby, Rust R | R, Racket, Ragel, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, SML, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, SYSTEMD, systemverilog S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Swift, SYSTEMD, systemverilog
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
V | VB.net, verilog, VHDL, VimL, vue V | VB.net, verilog, VHDL, VimL, vue
W | WDTE W | WDTE
X | XML, Xorg X | XML, Xorg
Y | YAML Y | YAML, YANG
Z | Zig
_I will attempt to keep this section up to date, but an authoritative list can be _I will attempt to keep this section up to date, but an authoritative list can be
@ -183,7 +184,7 @@ following constructor options:
- `ClassPrefix(prefix)` - prefix each generated CSS class. - `ClassPrefix(prefix)` - prefix each generated CSS class.
- `TabWidth(width)` - Set the rendered tab width, in characters. - `TabWidth(width)` - Set the rendered tab width, in characters.
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). - `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
- `LinkableLineNumbers()` - Make the line numbers linkable. - `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves.
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). - `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. - `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.

View File

@ -22,6 +22,9 @@ func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix =
 // WithClasses emits HTML using CSS classes, rather than inline styles.
 func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }
+// WithAllClasses disables an optimisation that omits redundant CSS classes.
+func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }
 // TabWidth sets the number of characters for a tab. Defaults to 8.
 func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
@ -141,6 +144,7 @@ type Formatter struct {
 	standalone  bool
 	prefix      string
 	Classes     bool // Exported field to detect when classes are being used
+	allClasses  bool
 	preWrapper  PreWrapper
 	tabWidth    int
 	lineNumbers bool
@ -188,7 +192,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
wrapInTable := f.lineNumbers && f.lineNumbersInTable wrapInTable := f.lineNumbers && f.lineNumbersInTable
lines := chroma.SplitTokensIntoLines(tokens) lines := chroma.SplitTokensIntoLines(tokens)
lineDigits := len(fmt.Sprintf("%d", len(lines))) lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
highlightIndex := 0 highlightIndex := 0
if wrapInTable { if wrapInTable {
@ -207,7 +211,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight)) fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
} }
fmt.Fprintf(w, "<span%s%s>%*d\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), lineDigits, line) fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
if highlight { if highlight {
fmt.Fprintf(w, "</span>") fmt.Fprintf(w, "</span>")
@ -233,7 +237,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
} }
if f.lineNumbers && !wrapInTable { if f.lineNumbers && !wrapInTable {
fmt.Fprintf(w, "<span%s%s>%*d</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), lineDigits, line) fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
} }
for _, token := range tokens { for _, token := range tokens {
@ -268,7 +272,19 @@ func (f *Formatter) lineIDAttribute(line int) string {
if !f.linkableLineNumbers { if !f.linkableLineNumbers {
return "" return ""
} }
return fmt.Sprintf(" id=\"%s%d\"", f.lineNumbersIDPrefix, line) return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
}
func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
title := fmt.Sprintf("%*d", lineDigits, line)
if !f.linkableLineNumbers {
return title
}
return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title)
}
func (f *Formatter) lineID(line int) string {
return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
} }
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) { func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {
@ -362,8 +378,12 @@ func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
 		if tt == chroma.Background {
 			continue
 		}
+		class := f.class(tt)
+		if class == "" {
+			continue
+		}
 		styles := css[tt]
-		if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, f.class(tt), styles); err != nil {
+		if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil {
 			return err
 		}
 	}
@ -379,7 +399,7 @@ func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string
 		if t != chroma.Background {
 			entry = entry.Sub(bg)
 		}
-		if entry.IsZero() {
+		if !f.allClasses && entry.IsZero() {
 			continue
 		}
 		classes[t] = StyleEntryToCSS(entry)
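
A hedged illustration of the new `WithAllClasses` option: by default the formatter skips token types whose computed style resolves to nothing, and `WithAllClasses(true)` disables that optimisation so `WriteCSS` emits a class for every token type. The style name below is an arbitrary choice.

package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	style := styles.Get("solarized-dark")

	// Default: token types whose style adds nothing over the background are omitted.
	compact := html.New(html.WithClasses(true))
	_ = compact.WriteCSS(os.Stdout, style)

	// WithAllClasses(true): emit a CSS class for every token type.
	full := html.New(html.WithClasses(true), html.WithAllClasses(true))
	_ = full.WriteCSS(os.Stdout, style)
}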

View File

@ -1,19 +1,18 @@
module github.com/alecthomas/chroma module github.com/alecthomas/chroma
go 1.13
require ( require (
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae github.com/alecthomas/kong v0.2.4
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
github.com/dlclark/regexp2 v1.1.6 github.com/dlclark/regexp2 v1.2.0
github.com/mattn/go-colorable v0.0.9 github.com/mattn/go-colorable v0.1.6
github.com/mattn/go-isatty v0.0.4 github.com/mattn/go-isatty v0.0.12
github.com/pkg/errors v0.9.1 // indirect
github.com/sergi/go-diff v1.0.0 // indirect github.com/sergi/go-diff v1.0.0 // indirect
github.com/stretchr/testify v1.3.0 // indirect github.com/stretchr/testify v1.3.0 // indirect
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 // indirect golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
) )
replace github.com/GeertJohan/go.rice => github.com/alecthomas/go.rice v1.0.1-0.20190719113735-961b99d742e7
go 1.13

View File

@ -2,8 +2,8 @@ github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0= github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae h1:C4Q9m+oXOxcSWwYk9XzzafY2xAVAaeubZbUHJkw3PlY= github.com/alecthomas/kong v0.2.4 h1:Y0ZBCHAvHhTHw7FFJ2FzCAAG4pkbTgA45nc7BpMhDNk=
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI= github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY= github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ= github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ= github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
@ -11,15 +11,16 @@ github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg= github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs= github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
@ -29,5 +30,7 @@ github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

View File

@ -7,6 +7,7 @@ import (
 var (
 	defaultOptions = &TokeniseOptions{
 		State:    "root",
+		EnsureLF: true,
 	}
 )
@ -80,6 +81,10 @@ type TokeniseOptions struct {
 	State string
 	// Nested tokenisation.
 	Nested bool
+	// If true, all EOLs are converted into LF
+	// by replacing CRLF and CR
+	EnsureLF bool
 }
 // A Lexer for tokenising source code.
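
A small, assumption-labelled sketch of the new `EnsureLF` option, which normalises CRLF/CR line endings to LF before tokenising (and is now set on the default options). The lexer and input below are arbitrary choices for illustration.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	lexer := lexers.Get("go")

	// EnsureLF converts CRLF and CR to LF before lexing.
	it, err := lexer.Tokenise(
		&chroma.TokeniseOptions{State: "root", EnsureLF: true},
		"a := 1\r\nb := 2\r\n",
	)
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%s %q\n", tok.Type, tok.Value)
	}
}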

View File

@ -30,14 +30,14 @@ var Awk = internal.Register(MustNewLexer(
"root": { "root": {
{`^(?=\s|/)`, Text, Push("slashstartsregex")}, {`^(?=\s|/)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"), Include("commentsandwhitespace"),
{`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, {`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, {`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil}, {`[})\].]`, Punctuation, nil},
{`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")}, {`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")},
{`function\b`, KeywordDeclaration, Push("slashstartsregex")}, {`function\b`, KeywordDeclaration, Push("slashstartsregex")},
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil}, {`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil},
{`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil}, {`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil},
{`[$a-zA-Z_]\w*`, NameOther, nil}, {`[@$a-zA-Z_]\w*`, NameOther, nil},
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`[0-9]+`, LiteralNumberInteger, nil}, {`[0-9]+`, LiteralNumberInteger, nil},

View File

@ -14,7 +14,7 @@ var Bash = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "Bash", Name: "Bash",
Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"}, Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"},
Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"}, Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
MimeTypes: []string{"application/x-sh", "application/x-shellscript"}, MimeTypes: []string{"application/x-sh", "application/x-shellscript"},
}, },
Rules{ Rules{
@ -36,7 +36,7 @@ var Bash = internal.Register(MustNewLexer(
{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil}, {`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil}, {"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
{`\A#!.+\n`, CommentPreproc, nil}, {`\A#!.+\n`, CommentPreproc, nil},
{`#.*\S`, CommentSingle, nil}, {`#.*(\S|$)`, CommentSingle, nil},
{`\\[\w\W]`, LiteralStringEscape, nil}, {`\\[\w\W]`, LiteralStringEscape, nil},
{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil}, {`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
{`[\[\]{}()=]`, Operator, nil}, {`[\[\]{}()=]`, Operator, nil},

View File

@ -0,0 +1,206 @@
package c
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// caddyfileCommon are the rules common to both of the lexer variants
var caddyfileCommon = Rules{
"site_block_common": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
Include("base"),
},
"not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("arguments")},
{`\s+`, Text, nil},
},
"deep_not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
{`\s+`, Text, nil},
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
}
// Caddyfile lexer.
var Caddyfile = internal.Register(MustNewLexer(
&Config{
Name: "Caddyfile",
Aliases: []string{"caddyfile", "caddy"},
Filenames: []string{"Caddyfile*"},
MimeTypes: []string{},
},
Rules{
"root": {
Include("comments"),
// Global options block
{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
// Snippets
{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
// Site label
{`[^#{(\s,]+`, GenericHeading, Push("label")},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
{`\s+`, Text, nil},
},
"globals": {
{`\}`, Punctuation, Pop(1)},
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"snippet": {
{`\}`, Punctuation, Pop(1)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Any directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"label": {
// Allow multiple labels, comma separated, newlines after
// a comma means another label is coming
{`,\s*\n?`, Text, nil},
{` `, Text, nil},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
// Site label
{`[^#{(\s,]+`, GenericHeading, nil},
// Comment after non-block label (hack because comments end in \n)
{`#.*\n`, CommentSingle, Push("site_block")},
// Note: if \n, we'll never pop out of the site_block, it's valid
{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
},
"site_block": {
{`\}`, Punctuation, Pop(2)},
Include("site_block_common"),
},
}.Merge(caddyfileCommon),
))
// Caddyfile directive-only lexer.
var CaddyfileDirectives = internal.Register(MustNewLexer(
&Config{
Name: "Caddyfile Directives",
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
Filenames: []string{},
MimeTypes: []string{},
},
Rules{
// Same as "site_block" in Caddyfile
"root": {
Include("site_block_common"),
},
}.Merge(caddyfileCommon),
))
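
As a quick usage sketch (not part of the diff), the newly registered lexer can be selected by its aliases; the formatter and style names here are arbitrary choices.

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := `example.com {
	root * /var/www
	file_server
}
`
	// "caddyfile" resolves to the full Caddyfile lexer; "caddyfile-directives"
	// (alias "caddy-d") covers directive-only snippets.
	if err := quick.Highlight(os.Stdout, src, "caddyfile", "terminal256", "monokai"); err != nil {
		log.Fatal(err)
	}
}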

View File

@ -1,15 +1,12 @@
package circular package circular
import ( import (
"strings"
. "github.com/alecthomas/chroma" // nolint . "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/h"
"github.com/alecthomas/chroma/lexers/internal" "github.com/alecthomas/chroma/lexers/internal"
) )
// PHP lexer. // PHP lexer for pure PHP code (not embedded in HTML).
var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer( var PHP = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "PHP", Name: "PHP",
Aliases: []string{"php", "php3", "php4", "php5"}, Aliases: []string{"php", "php3", "php4", "php5"},
@ -19,12 +16,10 @@ var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
CaseInsensitive: true, CaseInsensitive: true,
EnsureNL: true, EnsureNL: true,
}, },
Rules{ phpCommonRules.Rename("php", "root"),
"root": { ))
{`<\?(php)?`, CommentPreproc, Push("php")},
{`[^<]+`, Other, nil}, var phpCommonRules = Rules{
{`<`, Other, nil},
},
"php": { "php": {
{`\?>`, CommentPreproc, Pop(1)}, {`\?>`, CommentPreproc, Pop(1)},
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil}, {`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
@ -82,10 +77,4 @@ var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil}, {`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
{`[${\\]`, LiteralStringDouble, nil}, {`[${\\]`, LiteralStringDouble, nil},
}, },
}, }
).SetAnalyser(func(text string) float32 {
if strings.Contains(text, "<?php") {
return 0.5
}
return 0.0
})))

View File

@ -0,0 +1,34 @@
package circular
import (
"strings"
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/h"
"github.com/alecthomas/chroma/lexers/internal"
)
// PHTML lexer is PHP in HTML.
var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
&Config{
Name: "PHTML",
Aliases: []string{"phtml"},
Filenames: []string{"*.phtml"},
MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"},
DotAll: true,
CaseInsensitive: true,
EnsureNL: true,
},
Rules{
"root": {
{`<\?(php)?`, CommentPreproc, Push("php")},
{`[^<]+`, Other, nil},
{`<`, Other, nil},
},
}.Merge(phpCommonRules),
).SetAnalyser(func(text string) float32 {
if strings.Contains(text, "<?php") {
return 0.5
}
return 0.0
})))

View File

@ -28,6 +28,13 @@ var Elixir = internal.Register(MustNewLexer(
{`:"`, LiteralStringSymbol, Push("string_double_atom")}, {`:"`, LiteralStringSymbol, Push("string_double_atom")},
{`:'`, LiteralStringSymbol, Push("string_single_atom")}, {`:'`, LiteralStringSymbol, Push("string_single_atom")},
{`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, {`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil},
{`(fn|do|end|after|else|rescue|catch)\b`, Keyword, nil},
{`(not|and|or|when|in)\b`, OperatorWord, nil},
{`(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b`, Keyword, nil},
{`(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b`, KeywordDeclaration, nil},
{`(import|require|use|alias)\b`, KeywordNamespace, nil},
{`(nil|true|false)\b`, NameConstant, nil},
{`(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b`, NamePseudo, nil},
{`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil}, {`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil},
{`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil}, {`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil},
{`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil}, {`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil},

vendor/github.com/alecthomas/chroma/lexers/g/gherkin.go (new vendored file, 118 lines)
View File

@ -0,0 +1,118 @@
package g
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
var stepKeywords = `^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )`
var featureKeywords = `^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$`
var featureElementKeywords = `^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$`
var examplesKeywords = `^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$`
// Gherkin lexer.
var Gherkin = internal.Register(MustNewLexer(
&Config{
Name: "Gherkin",
Aliases: []string{"cucumber", "Cucumber", "gherkin", "Gherkin"},
Filenames: []string{"*.feature", "*.FEATURE"},
MimeTypes: []string{"text/x-gherkin"},
},
Rules{
"comments": {
{`\s*#.*$`, Comment, nil},
},
"featureElements": {
{stepKeywords, Keyword, Push("stepContentStack")},
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"featureElementsOnStack": {
{stepKeywords, Keyword, Pop(2)},
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"examplesTable": {
{`\s+\|`, Keyword, Push("examplesTableHeader")},
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"examplesTableHeader": {
{`\s+\|\s*$`, Keyword, Pop(2)},
Include("comments"),
{`\\\|`, NameVariable, nil},
{`\s*\|`, Keyword, nil},
{`[^|]`, NameVariable, nil},
},
"scenarioSectionsOnStack": {
{featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("featureElementsOnStack")},
},
"narrative": {
Include("scenarioSectionsOnStack"),
{`(\s|.)`, NameFunction, nil},
},
"tableVars": {
{`(<[^>]+>)`, NameVariable, nil},
},
"numbers": {
{`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralString, nil},
},
"string": {
Include("tableVars"),
{`(\s|.)`, LiteralString, nil},
},
"pyString": {
{`"""`, Keyword, Pop(1)},
Include("string"),
},
"stepContentRoot": {
{`$`, Keyword, Pop(1)},
Include("stepContent"),
},
"stepContentStack": {
{`$`, Keyword, Pop(2)},
Include("stepContent"),
},
"stepContent": {
{`"`, NameFunction, Push("doubleString")},
Include("tableVars"),
Include("numbers"),
Include("comments"),
{`(\s|.)`, NameFunction, nil},
},
"tableContent": {
{`\s+\|\s*$`, Keyword, Pop(1)},
Include("comments"),
{`\\\|`, LiteralString, nil},
{`\s*\|`, Keyword, nil},
{`"`, LiteralString, Push("doubleStringTable")},
Include("string"),
},
"doubleString": {
{`"`, NameFunction, Pop(1)},
Include("string"),
},
"doubleStringTable": {
{`"`, LiteralString, Pop(1)},
Include("string"),
},
"root": {
{`\n`, NameFunction, nil},
Include("comments"),
{`"""`, Keyword, Push("pyString")},
{`\s+\|`, Keyword, Push("tableContent")},
{`"`, NameFunction, Push("doubleString")},
Include("tableVars"),
Include("numbers"),
{`(\s*)(@[^@\r\n\t ]+)`, ByGroups(NameFunction, NameTag), nil},
{stepKeywords, ByGroups(NameFunction, Keyword), Push("stepContentRoot")},
{featureKeywords, ByGroups(Keyword, Keyword, NameFunction), Push("narrative")},
{featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("featureElements")},
{examplesKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("examplesTable")},
{`(\s|.)`, NameFunction, nil},
},
},
))

View File

@ -15,6 +15,7 @@ var Go = internal.Register(MustNewLexer(
Aliases: []string{"go", "golang"}, Aliases: []string{"go", "golang"},
Filenames: []string{"*.go"}, Filenames: []string{"*.go"},
MimeTypes: []string{"text/x-gosrc"}, MimeTypes: []string{"text/x-gosrc"},
EnsureNL: true,
}, },
Rules{ Rules{
"root": { "root": {
@ -59,13 +60,13 @@ var Go = internal.Register(MustNewLexer(
var goTemplateRules = Rules{ var goTemplateRules = Rules{
"root": { "root": {
{`{{(- )?/\*(.|\n)*?\*/( -)?}}`, CommentMultiline, nil},
{`{{[-]?`, CommentPreproc, Push("template")}, {`{{[-]?`, CommentPreproc, Push("template")},
{`[^{]+`, Other, nil}, {`[^{]+`, Other, nil},
{`{`, Other, nil}, {`{`, Other, nil},
}, },
"template": { "template": {
{`[-]?}}`, CommentPreproc, Pop(1)}, {`[-]?}}`, CommentPreproc, Pop(1)},
{`/\*.*?\*/`, Comment, nil},
{`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline {`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline
{`\(`, Operator, Push("subexpression")}, {`\(`, Operator, Push("subexpression")},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
@ -79,19 +80,19 @@ var goTemplateRules = Rules{
{`\s+`, Whitespace, nil}, {`\s+`, Whitespace, nil},
{`\(`, Operator, Push("subexpression")}, {`\(`, Operator, Push("subexpression")},
{`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil}, {`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil},
{`\||:=`, Operator, nil}, {`\||:?=|,`, Operator, nil},
{`[$]?[^\W\d]\w*`, NameOther, nil}, {`[$]?[^\W\d]\w*`, NameOther, nil},
{`[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil}, {`[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`\d+i`, LiteralNumber, nil}, {`-?\d+i`, LiteralNumber, nil},
{`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, {`-?\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
{`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
{`\d+[Ee][-+]\d+i`, LiteralNumber, nil}, {`-?\d+[Ee][-+]\d+i`, LiteralNumber, nil},
{`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, {`-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
{`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, {`-?\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
{`0[0-7]+`, LiteralNumberOct, nil}, {`-?0[0-7]+`, LiteralNumberOct, nil},
{`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, {`-?0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
{`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil}, {`-?(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
{`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil}, {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
{"`[^`]*`", LiteralString, nil}, {"`[^`]*`", LiteralString, nil},
}, },

View File

@ -19,8 +19,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
}, },
Rules{ Rules{
"root": { "root": {
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")}, {`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")}, {`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
}, },
"headers": { "headers": {
{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil}, {`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},

vendor/github.com/alecthomas/chroma/lexers/hlb.go (new vendored file, 54 lines)
View File

@ -0,0 +1,54 @@
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// HLB lexer.
var HLB = internal.Register(MustNewLexer(
&Config{
Name: "HLB",
Aliases: []string{"hlb"},
Filenames: []string{"*.hlb"},
MimeTypes: []string{},
},
Rules{
"root": {
{`(#.*)`, ByGroups(CommentSingle), nil},
{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
{`((\b(true|false)\b))`, ByGroups(NameBuiltin), nil},
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)(\()`, ByGroups(NameFunction, Punctuation), Push("params")},
{`(\{)`, ByGroups(Punctuation), Push("block")},
{`(\n|\r|\r\n)`, Text, nil},
{`.`, Text, nil},
},
"string": {
{`"`, LiteralString, Pop(1)},
{`\\"`, LiteralString, nil},
{`[^\\"]+`, LiteralString, nil},
},
"block": {
{`(\})`, ByGroups(Punctuation), Pop(1)},
{`(#.*)`, ByGroups(CommentSingle), nil},
{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
{`((\b(true|false)\b))`, ByGroups(KeywordConstant), nil},
{`"`, LiteralString, Push("string")},
{`(with)`, ByGroups(KeywordReserved), nil},
{`(as)([\t ]+)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(KeywordReserved, Text, NameFunction), nil},
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)([\t ]+)(\{)`, ByGroups(KeywordType, Text, Punctuation), Push("block")},
{`(?!\b(?:scratch|image|resolve|http|checksum|chmod|filename|git|keepGitDir|local|includePatterns|excludePatterns|followPaths|generate|frontendInput|shell|run|readonlyRootfs|env|dir|user|network|security|host|ssh|secret|mount|target|localPath|uid|gid|mode|readonly|tmpfs|sourcePath|cache|mkdir|createParents|chown|createdTime|mkfile|rm|allowNotFound|allowWildcards|copy|followSymlinks|contentsOnly|unpack|createDestPath)\b)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
{`(\n|\r|\r\n)`, Text, nil},
{`.`, Text, nil},
},
"params": {
{`(\))`, ByGroups(Punctuation), Pop(1)},
{`(variadic)`, ByGroups(Keyword), nil},
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
{`(\n|\r|\r\n)`, Text, nil},
{`.`, Text, nil},
},
},
))

View File

@ -10,7 +10,7 @@ var Ini = internal.Register(MustNewLexer(
 	&Config{
 		Name:      "INI",
 		Aliases:   []string{"ini", "cfg", "dosini"},
-		Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig"},
+		Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"},
 		MimeTypes: []string{"text/x-ini", "text/inf"},
 	},
 	Rules{

View File

@ -37,19 +37,20 @@ func Names(withAliases bool) []string {
// Get a Lexer by name, alias or file extension. // Get a Lexer by name, alias or file extension.
func Get(name string) chroma.Lexer { func Get(name string) chroma.Lexer {
candidates := chroma.PrioritisedLexers{}
if lexer := Registry.byName[name]; lexer != nil { if lexer := Registry.byName[name]; lexer != nil {
candidates = append(candidates, lexer) return lexer
} }
if lexer := Registry.byAlias[name]; lexer != nil { if lexer := Registry.byAlias[name]; lexer != nil {
candidates = append(candidates, lexer) return lexer
} }
if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil { if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
candidates = append(candidates, lexer) return lexer
} }
if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil { if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
candidates = append(candidates, lexer) return lexer
} }
candidates := chroma.PrioritisedLexers{}
// Try file extension. // Try file extension.
if lexer := Match("filename." + name); lexer != nil { if lexer := Match("filename." + name); lexer != nil {
candidates = append(candidates, lexer) candidates = append(candidates, lexer)
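For orientation, a minimal sketch of how a caller resolves a lexer after this change: an exact name or alias hit now returns immediately, while filename lookups still go through the candidate list. The top-level `lexers.Get`/`lexers.Match` wrappers and the `example.hlb` filename are assumptions for illustration, not part of this diff.

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// An exact name or alias hit now returns immediately, skipping the
	// priority-based candidate selection below it.
	if lexer := lexers.Get("go"); lexer != nil {
		fmt.Println("by name:", lexer.Config().Name)
	}

	// Filename lookups still collect candidates and pick by priority.
	if lexer := lexers.Match("example.hlb"); lexer != nil {
		fmt.Println("by filename:", lexer.Config().Name)
	}
}
```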

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -24,32 +24,71 @@ var Kotlin = internal.Register(MustNewLexer(
{`//[^\n]*\n?`, CommentSingle, nil}, {`//[^\n]*\n?`, CommentSingle, nil},
{`/[*].*?[*]/`, CommentMultiline, nil}, {`/[*].*?[*]/`, CommentMultiline, nil},
{`\n`, Text, nil}, {`\n`, Text, nil},
{`::|!!|\?[:.]`, Operator, nil}, {`!==|!in|!is|===`, Operator, nil},
{`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil}, {`%=|&&|\*=|\+\+|\+=|--|-=|->|\.\.|\/=|::|<=|==|>=|!!|!=|\|\||\?[:.]`, Operator, nil},
{`[~!%^&*()+=|\[\]:;,.<>\/?-]`, Punctuation, nil},
{`[{}]`, Punctuation, nil}, {`[{}]`, Punctuation, nil},
{`"""[^"]*"""`, LiteralString, nil}, {`"""`, LiteralString, Push("rawstring")},
{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, {`"`, LiteralStringDouble, Push("string")},
{`(')(\\u[0-9a-fA-F]{4})(')`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralStringChar), nil},
{`'\\.'|'[^\\]'`, LiteralStringChar, nil}, {`'\\.'|'[^\\]'`, LiteralStringChar, nil},
{`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, nil}, {`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, nil},
{`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil}, {`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil},
{`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")}, {`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")},
{`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")}, {`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")},
{`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")}, {`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")},
{`(fun)(\s+)(<[^>]*>\s+)?`, ByGroups(Keyword, Text, Text), Push("function")}, {`(fun)(\s+)`, ByGroups(Keyword, Text), Push("function")},
{`(abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|false|final|finally|for|fun|get|if|import|in|infix|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|true|try|val|var|vararg|when|where|while)\b`, Keyword, nil}, {`(abstract|actual|annotation|as|as\?|break|by|catch|class|companion|const|constructor|continue|crossinline|data|delegate|do|dynamic|else|enum|expect|external|false|field|file|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|it|lateinit|noinline|null|object|open|operator|out|override|package|param|private|property|protected|public|receiver|reified|return|sealed|set|setparam|super|suspend|tailrec|this|throw|true|try|typealias|typeof|val|var|vararg|when|where|while)\b`, Keyword, nil},
{"(@?[" + kotlinIdentifier + "]*`)", Name, nil}, {`@[` + kotlinIdentifier + `]+`, NameDecorator, nil},
{`[` + kotlinIdentifier + `]+`, Name, nil},
}, },
"package": { "package": {
{`\S+`, NameNamespace, Pop(1)}, {`\S+`, NameNamespace, Pop(1)},
}, },
"class": { "class": {
{"(@?[" + kotlinIdentifier + "]*`)", NameClass, Pop(1)}, // \x60 is the back tick character (`)
{`\x60[^\x60]+?\x60`, NameClass, Pop(1)},
{`[` + kotlinIdentifier + `]+`, NameClass, Pop(1)},
}, },
"property": { "property": {
{"(@?[" + kotlinIdentifier + " ]*`)", NameProperty, Pop(1)}, {`\x60[^\x60]+?\x60`, NameProperty, Pop(1)},
{`[` + kotlinIdentifier + `]+`, NameProperty, Pop(1)},
},
"generics-specification": {
{`<`, Punctuation, Push("generics-specification")}, // required for generics inside generics e.g. <T : List<Int> >
{`>`, Punctuation, Pop(1)},
{`[,:*?]`, Punctuation, nil},
{`(in|out|reified)`, Keyword, nil},
{`\x60[^\x60]+?\x60`, NameClass, nil},
{`[` + kotlinIdentifier + `]+`, NameClass, nil},
{`\s+`, Text, nil},
}, },
"function": { "function": {
{"(@?[" + kotlinIdentifier + " ]*`)", NameFunction, Pop(1)}, {`<`, Punctuation, Push("generics-specification")},
{`\x60[^\x60]+?\x60`, NameFunction, Pop(1)},
{`[` + kotlinIdentifier + `]+`, NameFunction, Pop(1)},
{`\s+`, Text, nil},
},
"rawstring": {
// raw strings don't allow character escaping
{`"""`, LiteralString, Pop(1)},
{`(?:[^$"]+|\"{1,2}[^"])+`, LiteralString, nil},
Include("string-interpol"),
// remaining dollar signs are just a string
{`\$`, LiteralString, nil},
},
"string": {
{`\\[tbnr'"\\\$]`, LiteralStringEscape, nil},
{`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil},
{`"`, LiteralStringDouble, Pop(1)},
Include("string-interpol"),
{`[^\n\\"$]+`, LiteralStringDouble, nil},
// remaining dollar signs are just a string
{`\$`, LiteralStringDouble, nil},
},
"string-interpol": {
{`\$[` + kotlinIdentifier + `]+`, LiteralStringInterpol, nil},
{`\${[^}\n]*}`, LiteralStringInterpol, nil},
}, },
}, },
)) ))


@ -32,6 +32,7 @@ import (
_ "github.com/alecthomas/chroma/lexers/w" _ "github.com/alecthomas/chroma/lexers/w"
_ "github.com/alecthomas/chroma/lexers/x" _ "github.com/alecthomas/chroma/lexers/x"
_ "github.com/alecthomas/chroma/lexers/y" _ "github.com/alecthomas/chroma/lexers/y"
_ "github.com/alecthomas/chroma/lexers/z"
) )
// Registry of Lexers. // Registry of Lexers.


@ -2,11 +2,12 @@ package m
import ( import (
. "github.com/alecthomas/chroma" // nolint . "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/h"
"github.com/alecthomas/chroma/lexers/internal" "github.com/alecthomas/chroma/lexers/internal"
) )
// Markdown lexer. // Markdown lexer.
var Markdown = internal.Register(MustNewLexer( var Markdown = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
&Config{ &Config{
Name: "markdown", Name: "markdown",
Aliases: []string{"md", "mkd"}, Aliases: []string{"md", "mkd"},
@ -40,8 +41,8 @@ var Markdown = internal.Register(MustNewLexer(
{"`[^`]+`", LiteralStringBacktick, nil}, {"`[^`]+`", LiteralStringBacktick, nil},
{`[@#][\w/:]+`, NameEntity, nil}, {`[@#][\w/:]+`, NameEntity, nil},
{`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil}, {`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil},
{`[^\\\s]+`, Text, nil}, {`[^\\\s]+`, Other, nil},
{`.|\n`, Text, nil}, {`.|\n`, Other, nil},
}, },
}, },
)) )))


@ -11,6 +11,7 @@ var Plaintext = internal.Register(MustNewLexer(
Aliases: []string{"text", "plain", "no-highlight"}, Aliases: []string{"text", "plain", "no-highlight"},
Filenames: []string{"*.txt"}, Filenames: []string{"*.txt"},
MimeTypes: []string{"text/plain"}, MimeTypes: []string{"text/plain"},
Priority: 0.1,
}, },
internal.PlaintextRules, internal.PlaintextRules,
)) ))

59 vendor/github.com/alecthomas/chroma/lexers/p/pony.go generated vendored Normal file

@ -0,0 +1,59 @@
package p
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Pony lexer.
var Pony = internal.Register(MustNewLexer(
&Config{
Name: "Pony",
Aliases: []string{"pony"},
Filenames: []string{"*.pony"},
MimeTypes: []string{},
},
Rules{
"root": {
{`\n`, Text, nil},
{`[^\S\n]+`, Text, nil},
{`//.*\n`, CommentSingle, nil},
{`/\*`, CommentMultiline, Push("nested_comment")},
{`"""(?:.|\n)*?"""`, LiteralStringDoc, nil},
{`"`, LiteralString, Push("string")},
{`\'.*\'`, LiteralStringChar, nil},
{`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil},
{Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil},
{`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil},
{Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil},
{`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")},
{`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")},
{Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil},
{`_?[A-Z]\w*`, NameClass, nil},
{`string\(\)`, NameOther, nil},
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`\d+`, LiteralNumberInteger, nil},
{`(true|false)\b`, Keyword, nil},
{`_\d*`, Name, nil},
{`_?[a-z][\w\'_]*`, Name, nil},
},
"typename": {
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)},
},
"methodname": {
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)},
},
"nested_comment": {
{`[^*/]+`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
"string": {
{`"`, LiteralString, Pop(1)},
{`\\"`, LiteralString, nil},
{`[^\\"]+`, LiteralString, nil},
},
},
))

55 vendor/github.com/alecthomas/chroma/lexers/p/promql.go generated vendored Normal file

@ -0,0 +1,55 @@
package p
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Promql lexer.
var Promql = internal.Register(MustNewLexer(
&Config{
Name: "PromQL",
Aliases: []string{"promql"},
Filenames: []string{"*.promql"},
MimeTypes: []string{},
},
Rules{
"root": {
{`\n`, TextWhitespace, nil},
{`\s+`, TextWhitespace, nil},
{`,`, Punctuation, nil},
{Words(``, `\b`, `bool`, `by`, `group_left`, `group_right`, `ignoring`, `offset`, `on`, `without`), Keyword, nil},
{Words(``, `\b`, `sum`, `min`, `max`, `avg`, `group`, `stddev`, `stdvar`, `count`, `count_values`, `bottomk`, `topk`, `quantile`), Keyword, nil},
{Words(``, `\b`, `abs`, `absent`, `absent_over_time`, `avg_over_time`, `ceil`, `changes`, `clamp_max`, `clamp_min`, `count_over_time`, `day_of_month`, `day_of_week`, `days_in_month`, `delta`, `deriv`, `exp`, `floor`, `histogram_quantile`, `holt_winters`, `hour`, `idelta`, `increase`, `irate`, `label_join`, `label_replace`, `ln`, `log10`, `log2`, `max_over_time`, `min_over_time`, `minute`, `month`, `predict_linear`, `quantile_over_time`, `rate`, `resets`, `round`, `scalar`, `sort`, `sort_desc`, `sqrt`, `stddev_over_time`, `stdvar_over_time`, `sum_over_time`, `time`, `timestamp`, `vector`, `year`), KeywordReserved, nil},
{`[1-9][0-9]*[smhdwy]`, LiteralString, nil},
{`-?[0-9]+\.[0-9]+`, LiteralNumberFloat, nil},
{`-?[0-9]+`, LiteralNumberInteger, nil},
{`#.*?$`, CommentSingle, nil},
{`(\+|\-|\*|\/|\%|\^)`, Operator, nil},
{`==|!=|>=|<=|<|>`, Operator, nil},
{`and|or|unless`, OperatorWord, nil},
{`[_a-zA-Z][a-zA-Z0-9_]+`, NameVariable, nil},
{`(["\'])(.*?)(["\'])`, ByGroups(Punctuation, LiteralString, Punctuation), nil},
{`\(`, Operator, Push("function")},
{`\)`, Operator, nil},
{`\{`, Punctuation, Push("labels")},
{`\[`, Punctuation, Push("range")},
},
"labels": {
{`\}`, Punctuation, Pop(1)},
{`\n`, TextWhitespace, nil},
{`\s+`, TextWhitespace, nil},
{`,`, Punctuation, nil},
{`([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|~!)(\s*?)(")(.*?)(")`, ByGroups(NameLabel, TextWhitespace, Operator, TextWhitespace, Punctuation, LiteralString, Punctuation), nil},
},
"range": {
{`\]`, Punctuation, Pop(1)},
{`[1-9][0-9]*[smhdwy]`, LiteralString, nil},
},
"function": {
{`\)`, Operator, Pop(1)},
{`\(`, Operator, Push()},
Default(Pop(1)),
},
},
))

54 vendor/github.com/alecthomas/chroma/lexers/qml.go generated vendored Normal file

@ -0,0 +1,54 @@
package lexers
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Qml lexer.
var Qml = internal.Register(MustNewLexer(
&Config{
Name: "QML",
Aliases: []string{"qml", "qbs"},
Filenames: []string{"*.qml", "*.qbs"},
MimeTypes: []string{"application/x-qml", "application/x-qt.qbs+qml"},
DotAll: true,
},
Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`<!--`, Comment, nil},
{`//.*?\n`, CommentSingle, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
},
"slashstartsregex": {
Include("commentsandwhitespace"),
{`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)},
{`(?=/)`, Text, Push("#pop", "badregex")},
Default(Pop(1)),
},
"badregex": {
{`\n`, Text, Pop(1)},
},
"root": {
{`^(?=\s|/|<!--)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"),
{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil},
{`\bid\s*:\s*[A-Za-z][\w.]*`, KeywordDeclaration, Push("slashstartsregex")},
{`\b[A-Za-z][\w.]*\s*:`, Keyword, Push("slashstartsregex")},
{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")},
{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil},
{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
{`[$a-zA-Z_]\w*`, NameOther, nil},
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`[0-9]+`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
},
},
))


@ -0,0 +1,67 @@
package r
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Reasonml lexer.
var Reasonml = internal.Register(MustNewLexer(
&Config{
Name: "ReasonML",
Aliases: []string{"reason", "reasonml"},
Filenames: []string{"*.re", "*.rei"},
MimeTypes: []string{"text/x-reasonml"},
},
Rules{
"escape-sequence": {
{`\\[\\"\'ntbr]`, LiteralStringEscape, nil},
{`\\[0-9]{3}`, LiteralStringEscape, nil},
{`\\x[0-9a-fA-F]{2}`, LiteralStringEscape, nil},
},
"root": {
{`\s+`, Text, nil},
{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil},
{`\b([A-Z][\w\']*)(?=\s*\.)`, NameNamespace, Push("dotted")},
{`\b([A-Z][\w\']*)`, NameClass, nil},
{`//.*?\n`, CommentSingle, nil},
{`\/\*(?![\/])`, CommentMultiline, Push("comment")},
{`\b(as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|false|for|fun|esfun|function|functor|if|in|include|inherit|initializer|lazy|let|switch|module|pub|mutable|new|nonrec|object|of|open|pri|rec|sig|struct|then|to|true|try|type|val|virtual|when|while|with)\b`, Keyword, nil},
{"(~|\\}|\\|]|\\||\\|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\\.\\.\\.|\\.\\.|\\.|=>|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", OperatorWord, nil},
{`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil},
{`\b(and|asr|land|lor|lsl|lsr|lxor|mod|or)\b`, OperatorWord, nil},
{`\b(unit|int|float|bool|string|char|list|array)\b`, KeywordType, nil},
{`[^\W\d][\w']*`, Name, nil},
{`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil},
{`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil},
{`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil},
{`0[bB][01][01_]*`, LiteralNumberBin, nil},
{`\d[\d_]*`, LiteralNumberInteger, nil},
{`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil},
{`'.'`, LiteralStringChar, nil},
{`'`, Keyword, nil},
{`"`, LiteralStringDouble, Push("string")},
{`[~?][a-z][\w\']*:`, NameVariable, nil},
},
"comment": {
{`[^\/*]+`, CommentMultiline, nil},
{`\/\*`, CommentMultiline, Push()},
{`\*\/`, CommentMultiline, Pop(1)},
{`[\*]`, CommentMultiline, nil},
},
"string": {
{`[^\\"]+`, LiteralStringDouble, nil},
Include("escape-sequence"),
{`\\\n`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
"dotted": {
{`\s+`, Text, nil},
{`\.`, Punctuation, nil},
{`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil},
{`[A-Z][\w\']*`, NameClass, Pop(1)},
{`[a-z_][\w\']*`, Name, Pop(1)},
Default(Pop(1)),
},
},
))


@ -28,18 +28,18 @@ var Rust = internal.Register(MustNewLexer(
{`/\*\*(\n|[^/*])`, LiteralStringDoc, Push("doccomment")}, {`/\*\*(\n|[^/*])`, LiteralStringDoc, Push("doccomment")},
{`/\*!`, LiteralStringDoc, Push("doccomment")}, {`/\*!`, LiteralStringDoc, Push("doccomment")},
{`/\*`, CommentMultiline, Push("comment")}, {`/\*`, CommentMultiline, Push("comment")},
{`r#*"(?:\\.|[^\\\r\n;])*"#*`, LiteralString, nil}, {`r#*"(?:\\.|[^\\;])*"#*`, LiteralString, nil},
{`"(?:\\.|[^\\\r\n"])*"`, LiteralString, nil}, {`"(?:\\.|[^\\"])*"`, LiteralString, nil},
{`\$([a-zA-Z_]\w*|\(,?|\),?|,?)`, CommentPreproc, nil}, {`\$([a-zA-Z_]\w*|\(,?|\),?|,?)`, CommentPreproc, nil},
{Words(``, `\b`, `as`, `box`, `const`, `crate`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil}, {Words(``, `\b`, `as`, `async`, `await`, `const`, `crate`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil},
{Words(``, `\b`, `abstract`, `alignof`, `become`, `do`, `final`, `macro`, `offsetof`, `override`, `priv`, `proc`, `pure`, `sizeof`, `typeof`, `unsized`, `virtual`, `yield`), KeywordReserved, nil}, {Words(``, `\b`, `abstract`, `become`, `box`, `do`, `final`, `macro`, `override`, `priv`, `try`, `typeof`, `unsized`, `virtual`, `yield`), KeywordReserved, nil},
{`(true|false)\b`, KeywordConstant, nil}, {`(true|false)\b`, KeywordConstant, nil},
{`mod\b`, Keyword, Push("modname")}, {`mod\b`, Keyword, Push("modname")},
{`let\b`, KeywordDeclaration, nil}, {`let\b`, KeywordDeclaration, nil},
{`fn\b`, Keyword, Push("funcname")}, {`fn\b`, Keyword, Push("funcname")},
{`(struct|enum|type|union)\b`, Keyword, Push("typename")}, {`(struct|enum|type|union)\b`, Keyword, Push("typename")},
{`(default)(\s+)(type|fn)\b`, ByGroups(Keyword, Text, Keyword), nil}, {`(default)(\s+)(type|fn)\b`, ByGroups(Keyword, Text, Keyword), nil},
{Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `i8`, `i16`, `i32`, `i64`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil}, {Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `u128`, `i8`, `i16`, `i32`, `i64`, `i128`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil},
{`self\b`, NameBuiltinPseudo, nil}, {`self\b`, NameBuiltinPseudo, nil},
{Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `SliceConcatExt`, `String`, `ToString`, `Vec`), NameBuiltin, nil}, {Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `SliceConcatExt`, `String`, `ToString`, `Vec`), NameBuiltin, nil},
{`::\b`, Text, nil}, {`::\b`, Text, nil},
@ -58,7 +58,7 @@ var Rust = internal.Register(MustNewLexer(
{`'[a-zA-Z_]\w*`, NameAttribute, nil}, {`'[a-zA-Z_]\w*`, NameAttribute, nil},
{`[{}()\[\],.;]`, Punctuation, nil}, {`[{}()\[\],.;]`, Punctuation, nil},
{`[+\-*/%&|<>^!~@=:?]`, Operator, nil}, {`[+\-*/%&|<>^!~@=:?]`, Operator, nil},
{`[a-zA-Z_]\w*`, Name, nil}, {`(r#)?[a-zA-Z_]\w*`, Name, nil},
{`#!?\[`, CommentPreproc, Push("attribute[")}, {`#!?\[`, CommentPreproc, Push("attribute[")},
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, TextWhitespace, Punctuation), Push("macro{")}, {`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, TextWhitespace, Punctuation), Push("macro{")},
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, Punctuation), Push("macro(")}, {`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, Punctuation), Push("macro(")},

94 vendor/github.com/alecthomas/chroma/lexers/s/sas.go generated vendored Normal file

@ -0,0 +1,94 @@
package s
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Sas lexer.
var Sas = internal.Register(MustNewLexer(
&Config{
Name: "SAS",
Aliases: []string{"sas"},
Filenames: []string{"*.SAS", "*.sas"},
MimeTypes: []string{"text/x-sas", "text/sas", "application/x-sas"},
CaseInsensitive: true,
},
Rules{
"root": {
Include("comments"),
Include("proc-data"),
Include("cards-datalines"),
Include("logs"),
Include("general"),
{`.`, Text, nil},
{`\\\n`, Text, nil},
{`\n`, Text, nil},
},
"comments": {
{`^\s*\*.*?;`, Comment, nil},
{`/\*.*?\*/`, Comment, nil},
{`^\s*\*(.|\n)*?;`, CommentMultiline, nil},
{`/[*](.|\n)*?[*]/`, CommentMultiline, nil},
},
"proc-data": {
{`(^|;)\s*(proc \w+|data|run|quit)[\s;]`, KeywordReserved, nil},
},
"cards-datalines": {
{`^\s*(datalines|cards)\s*;\s*$`, Keyword, Push("data")},
},
"data": {
{`(.|\n)*^\s*;\s*$`, Other, Pop(1)},
},
"logs": {
{`\n?^\s*%?put `, Keyword, Push("log-messages")},
},
"log-messages": {
{`NOTE(:|-).*`, Generic, Pop(1)},
{`WARNING(:|-).*`, GenericEmph, Pop(1)},
{`ERROR(:|-).*`, GenericError, Pop(1)},
Include("general"),
},
"general": {
Include("keywords"),
Include("vars-strings"),
Include("special"),
Include("numbers"),
},
"keywords": {
{Words(`\b`, `\b`, `abort`, `array`, `attrib`, `by`, `call`, `cards`, `cards4`, `catname`, `continue`, `datalines`, `datalines4`, `delete`, `delim`, `delimiter`, `display`, `dm`, `drop`, `endsas`, `error`, `file`, `filename`, `footnote`, `format`, `goto`, `in`, `infile`, `informat`, `input`, `keep`, `label`, `leave`, `length`, `libname`, `link`, `list`, `lostcard`, `merge`, `missing`, `modify`, `options`, `output`, `out`, `page`, `put`, `redirect`, `remove`, `rename`, `replace`, `retain`, `return`, `select`, `set`, `skip`, `startsas`, `stop`, `title`, `update`, `waitsas`, `where`, `window`, `x`, `systask`), Keyword, nil},
{Words(`\b`, `\b`, `add`, `and`, `alter`, `as`, `cascade`, `check`, `create`, `delete`, `describe`, `distinct`, `drop`, `foreign`, `from`, `group`, `having`, `index`, `insert`, `into`, `in`, `key`, `like`, `message`, `modify`, `msgtype`, `not`, `null`, `on`, `or`, `order`, `primary`, `references`, `reset`, `restrict`, `select`, `set`, `table`, `unique`, `update`, `validate`, `view`, `where`), Keyword, nil},
{Words(`\b`, `\b`, `do`, `if`, `then`, `else`, `end`, `until`, `while`), Keyword, nil},
{Words(`%`, `\b`, `bquote`, `nrbquote`, `cmpres`, `qcmpres`, `compstor`, `datatyp`, `display`, `do`, `else`, `end`, `eval`, `global`, `goto`, `if`, `index`, `input`, `keydef`, `label`, `left`, `length`, `let`, `local`, `lowcase`, `macro`, `mend`, `nrquote`, `nrstr`, `put`, `qleft`, `qlowcase`, `qscan`, `qsubstr`, `qsysfunc`, `qtrim`, `quote`, `qupcase`, `scan`, `str`, `substr`, `superq`, `syscall`, `sysevalf`, `sysexec`, `sysfunc`, `sysget`, `syslput`, `sysprod`, `sysrc`, `sysrput`, `then`, `to`, `trim`, `unquote`, `until`, `upcase`, `verify`, `while`, `window`), NameBuiltin, nil},
{Words(`\b`, `\(`, `abs`, `addr`, `airy`, `arcos`, `arsin`, `atan`, `attrc`, `attrn`, `band`, `betainv`, `blshift`, `bnot`, `bor`, `brshift`, `bxor`, `byte`, `cdf`, `ceil`, `cexist`, `cinv`, `close`, `cnonct`, `collate`, `compbl`, `compound`, `compress`, `cos`, `cosh`, `css`, `curobs`, `cv`, `daccdb`, `daccdbsl`, `daccsl`, `daccsyd`, `dacctab`, `dairy`, `date`, `datejul`, `datepart`, `datetime`, `day`, `dclose`, `depdb`, `depdbsl`, `depsl`, `depsyd`, `deptab`, `dequote`, `dhms`, `dif`, `digamma`, `dim`, `dinfo`, `dnum`, `dopen`, `doptname`, `doptnum`, `dread`, `dropnote`, `dsname`, `erf`, `erfc`, `exist`, `exp`, `fappend`, `fclose`, `fcol`, `fdelete`, `fetch`, `fetchobs`, `fexist`, `fget`, `fileexist`, `filename`, `fileref`, `finfo`, `finv`, `fipname`, `fipnamel`, `fipstate`, `floor`, `fnonct`, `fnote`, `fopen`, `foptname`, `foptnum`, `fpoint`, `fpos`, `fput`, `fread`, `frewind`, `frlen`, `fsep`, `fuzz`, `fwrite`, `gaminv`, `gamma`, `getoption`, `getvarc`, `getvarn`, `hbound`, `hms`, `hosthelp`, `hour`, `ibessel`, `index`, `indexc`, `indexw`, `input`, `inputc`, `inputn`, `int`, `intck`, `intnx`, `intrr`, `irr`, `jbessel`, `juldate`, `kurtosis`, `lag`, `lbound`, `left`, `length`, `lgamma`, `libname`, `libref`, `log`, `log10`, `log2`, `logpdf`, `logpmf`, `logsdf`, `lowcase`, `max`, `mdy`, `mean`, `min`, `minute`, `mod`, `month`, `mopen`, `mort`, `n`, `netpv`, `nmiss`, `normal`, `note`, `npv`, `open`, `ordinal`, `pathname`, `pdf`, `peek`, `peekc`, `pmf`, `point`, `poisson`, `poke`, `probbeta`, `probbnml`, `probchi`, `probf`, `probgam`, `probhypr`, `probit`, `probnegb`, `probnorm`, `probt`, `put`, `putc`, `putn`, `qtr`, `quote`, `ranbin`, `rancau`, `ranexp`, `rangam`, `range`, `rank`, `rannor`, `ranpoi`, `rantbl`, `rantri`, `ranuni`, `repeat`, `resolve`, `reverse`, `rewind`, `right`, `round`, `saving`, `scan`, `sdf`, `second`, `sign`, `sin`, `sinh`, `skewness`, `soundex`, `spedis`, `sqrt`, `std`, `stderr`, `stfips`, `stname`, `stnamel`, `substr`, `sum`, `symget`, `sysget`, `sysmsg`, `sysprod`, `sysrc`, `system`, `tan`, `tanh`, `time`, `timepart`, `tinv`, `tnonct`, `today`, `translate`, `tranwrd`, `trigamma`, `trim`, `trimn`, `trunc`, `uniform`, `upcase`, `uss`, `var`, `varfmt`, `varinfmt`, `varlabel`, `varlen`, `varname`, `varnum`, `varray`, `varrayx`, `vartype`, `verify`, `vformat`, `vformatd`, `vformatdx`, `vformatn`, `vformatnx`, `vformatw`, `vformatwx`, `vformatx`, `vinarray`, `vinarrayx`, `vinformat`, `vinformatd`, `vinformatdx`, `vinformatn`, `vinformatnx`, `vinformatw`, `vinformatwx`, `vinformatx`, `vlabel`, `vlabelx`, `vlength`, `vlengthx`, `vname`, `vnamex`, `vtype`, `vtypex`, `weekday`, `year`, `yyq`, `zipfips`, `zipname`, `zipnamel`, `zipstate`), NameBuiltin, nil},
},
"vars-strings": {
{`&[a-z_]\w{0,31}\.?`, NameVariable, nil},
{`%[a-z_]\w{0,31}`, NameFunction, nil},
{`\'`, LiteralString, Push("string_squote")},
{`"`, LiteralString, Push("string_dquote")},
},
"string_squote": {
{`'`, LiteralString, Pop(1)},
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
{`[^$\'\\]+`, LiteralString, nil},
{`[$\'\\]`, LiteralString, nil},
},
"string_dquote": {
{`"`, LiteralString, Pop(1)},
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
{`&`, NameVariable, Push("validvar")},
{`[^$&"\\]+`, LiteralString, nil},
{`[$"\\]`, LiteralString, nil},
},
"validvar": {
{`[a-z_]\w{0,31}\.?`, NameVariable, Pop(1)},
},
"numbers": {
{`\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b`, LiteralNumber, nil},
},
"special": {
{`(null|missing|_all_|_automatic_|_character_|_n_|_infile_|_name_|_null_|_numeric_|_user_|_webout_)`, KeywordConstant, nil},
},
},
))

62 vendor/github.com/alecthomas/chroma/lexers/s/stylus.go generated vendored Normal file

File diff suppressed because one or more lines are too long


@ -15,55 +15,46 @@ var Terraform = internal.Register(MustNewLexer(
}, },
Rules{ Rules{
"root": { "root": {
Include("string"), {`[\[\](),.{}]`, Punctuation, nil},
Include("punctuation"), {`-?[0-9]+`, LiteralNumber, nil},
Include("curly"), {`=>`, Punctuation, nil},
Include("basic"), {Words(``, `\b`, `true`, `false`), KeywordConstant, nil},
Include("whitespace"), {`/(?s)\*(((?!\*/).)*)\*/`, CommentMultiline, nil},
{`[0-9]+`, LiteralNumber, nil}, {`\s*(#|//).*\n`, CommentSingle, nil},
{`([a-zA-Z]\w*)(\s*)(=(?!>))`, ByGroups(NameAttribute, Text, Text), nil},
{Words(`^\s*`, `\b`, `variable`, `data`, `resource`, `provider`, `provisioner`, `module`, `output`), KeywordReserved, nil},
{Words(``, `\b`, `for`, `in`), Keyword, nil},
{Words(``, ``, `count`, `data`, `var`, `module`, `each`), NameBuiltin, nil},
{Words(``, `\b`, `abs`, `ceil`, `floor`, `log`, `max`, `min`, `parseint`, `pow`, `signum`), NameBuiltin, nil},
{Words(``, `\b`, `chomp`, `format`, `formatlist`, `indent`, `join`, `lower`, `regex`, `regexall`, `replace`, `split`, `strrev`, `substr`, `title`, `trim`, `trimprefix`, `trimsuffix`, `trimspace`, `upper`), NameBuiltin, nil},
{Words(`[^.]`, `\b`, `chunklist`, `coalesce`, `coalescelist`, `compact`, `concat`, `contains`, `distinct`, `element`, `flatten`, `index`, `keys`, `length`, `list`, `lookup`, `map`, `matchkeys`, `merge`, `range`, `reverse`, `setintersection`, `setproduct`, `setsubtract`, `setunion`, `slice`, `sort`, `transpose`, `values`, `zipmap`), NameBuiltin, nil},
{Words(`[^.]`, `\b`, `base64decode`, `base64encode`, `base64gzip`, `csvdecode`, `jsondecode`, `jsonencode`, `urlencode`, `yamldecode`, `yamlencode`), NameBuiltin, nil},
{Words(``, `\b`, `abspath`, `dirname`, `pathexpand`, `basename`, `file`, `fileexists`, `fileset`, `filebase64`, `templatefile`), NameBuiltin, nil},
{Words(``, `\b`, `formatdate`, `timeadd`, `timestamp`), NameBuiltin, nil},
{Words(``, `\b`, `base64sha256`, `base64sha512`, `bcrypt`, `filebase64sha256`, `filebase64sha512`, `filemd5`, `filesha1`, `filesha256`, `filesha512`, `md5`, `rsadecrypt`, `sha1`, `sha256`, `sha512`, `uuid`, `uuidv5`), NameBuiltin, nil},
{Words(``, `\b`, `cidrhost`, `cidrnetmask`, `cidrsubnet`), NameBuiltin, nil},
{Words(``, `\b`, `can`, `tobool`, `tolist`, `tomap`, `tonumber`, `toset`, `tostring`, `try`), NameBuiltin, nil},
{`=(?!>)|\+|-|\*|\/|:|!|%|>|<(?!<)|>=|<=|==|!=|&&|\||\?`, Operator, nil},
{`\n|\s+|\\\n`, Text, nil},
{`[a-zA-Z]\w*`, NameOther, nil},
{`"`, LiteralStringDouble, Push("string")},
{`(?s)(<<-?)(\w+)(\n\s*(?:(?!\2).)*\s*\n\s*)(\2)`, ByGroups(Operator, Operator, String, Operator), nil},
}, },
"basic": { "declaration": {
{Words(`\b`, `\b`, `true`, `false`), KeywordType, nil}, {`(\s*)("(?:\\\\|\\"|[^"])*")(\s*)`, ByGroups(Text, NameVariable, Text), nil},
{`\s*/\*`, CommentMultiline, Push("comment")}, {`\{`, Punctuation, Pop(1)},
{`\s*#.*\n`, CommentSingle, nil},
{`(.*?)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil},
{Words(`\b`, `\b`, `variable`, `resource`, `provider`, `provisioner`, `module`), KeywordReserved, Push("function")},
{Words(`\b`, `\b`, `ingress`, `egress`, `listener`, `default`, `connection`, `alias`), KeywordDeclaration, nil},
{`\$\{`, LiteralStringInterpol, Push("var_builtin")},
},
"function": {
{`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil},
Include("punctuation"),
Include("curly"),
},
"var_builtin": {
{`\$\{`, LiteralStringInterpol, Push()},
{Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil},
Include("string"),
Include("punctuation"),
{`\s+`, Text, nil},
{`\}`, LiteralStringInterpol, Pop(1)},
}, },
"string": { "string": {
{`(".*")`, ByGroups(LiteralStringDouble), nil}, {`"`, LiteralStringDouble, Pop(1)},
{`\\\\`, LiteralStringDouble, nil},
{`\\\\"`, LiteralStringDouble, nil},
{`\$\{`, LiteralStringInterpol, Push("interp-inside")},
{`\$`, LiteralStringDouble, nil},
{`[^"\\\\$]+`, LiteralStringDouble, nil},
}, },
"punctuation": { "interp-inside": {
{`[\[\](),.]`, Punctuation, nil}, {`\}`, LiteralStringInterpol, Pop(1)},
}, Include("root"),
"curly": {
{`\{`, TextPunctuation, nil},
{`\}`, TextPunctuation, nil},
},
"comment": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push()},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
"whitespace": {
{`\n`, Text, nil},
{`\s+`, Text, nil},
{`\\\n`, Text, nil},
}, },
}, },
)) ))


@ -22,7 +22,7 @@ var TOML = internal.Register(MustNewLexer(
{`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil}, {`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, StringDouble, nil}, {`"(\\\\|\\"|[^"])*"`, StringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, StringSingle, nil}, {`'(\\\\|\\'|[^'])*'`, StringSingle, nil},
{`[.,=\[\]]`, Punctuation, nil}, {`[.,=\[\]{}]`, Punctuation, nil},
{`[^\W\d]\w*`, NameOther, nil}, {`[^\W\d]\w*`, NameOther, nil},
}, },
}, },


@ -9,7 +9,7 @@ import (
var TypeScript = internal.Register(MustNewLexer( var TypeScript = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "TypeScript", Name: "TypeScript",
Aliases: []string{"ts", "typescript"}, Aliases: []string{"ts", "tsx", "typescript"},
Filenames: []string{"*.ts", "*.tsx"}, Filenames: []string{"*.ts", "*.tsx"},
MimeTypes: []string{"text/x-typescript"}, MimeTypes: []string{"text/x-typescript"},
DotAll: true, DotAll: true,
@ -32,19 +32,20 @@ var TypeScript = internal.Register(MustNewLexer(
{`\n`, Text, Pop(1)}, {`\n`, Text, Pop(1)},
}, },
"root": { "root": {
Include("jsx"),
{`^(?=\s|/|<!--)`, Text, Push("slashstartsregex")}, {`^(?=\s|/|<!--)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"), Include("commentsandwhitespace"),
{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")}, {`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, {`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil}, {`[})\].]`, Punctuation, nil},
{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")}, {`(for|in|of|while|do|break|return|yield|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|keyof|asserts|is|infer|await|void|this)\b`, Keyword, Push("slashstartsregex")},
{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")}, {`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil}, {`(abstract|async|boolean|class|const|debugger|enum|export|extends|from|get|global|goto|implements|import|interface|namespace|package|private|protected|public|readonly|require|set|static|super|type)\b`, KeywordReserved, nil},
{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil}, {`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil}, {`(Array|Boolean|Date|Error|Function|Math|Number|Object|Packages|RegExp|String|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
{`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")}, {`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")},
{`\b(string|bool|number)\b`, KeywordType, nil}, {`\b(string|bool|number|any|never|object|symbol|unique|unknown|bigint)\b`, KeywordType, nil},
{`\b(constructor|declare|interface|as|AS)\b`, KeywordReserved, nil}, {`\b(constructor|declare|interface|as)\b`, KeywordReserved, nil},
{`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")}, {`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")},
{`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")}, {`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")},
{`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil}, {`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil},
@ -69,5 +70,28 @@ var TypeScript = internal.Register(MustNewLexer(
{`\}`, LiteralStringInterpol, Pop(1)}, {`\}`, LiteralStringInterpol, Pop(1)},
Include("root"), Include("root"),
}, },
"jsx": {
{`(<)(/?)(>)`, ByGroups(Punctuation, Punctuation, Punctuation), nil},
{`(<)([\w\.]+)`, ByGroups(Punctuation, NameTag), Push("tag")},
{`(<)(/)([\w\.]*)(>)`, ByGroups(Punctuation, Punctuation, NameTag, Punctuation), nil},
},
"tag": {
{`\s+`, Text, nil},
{`([\w]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")},
{`[{}]+`, Punctuation, nil},
{`[\w\.]+`, NameAttribute, nil},
{`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)},
},
"attr": {
{`{`, Punctuation, Push("expression")},
{`".*?"`, LiteralString, Pop(1)},
{`'.*?'`, LiteralString, Pop(1)},
Default(Pop(1)),
},
"expression": {
{`{`, Punctuation, Push()},
{`}`, Punctuation, Pop(1)},
Include("root"),
},
}, },
)) ))


@ -10,9 +10,10 @@ var Typoscript = internal.Register(MustNewLexer(
&Config{ &Config{
Name: "TypoScript", Name: "TypoScript",
Aliases: []string{"typoscript"}, Aliases: []string{"typoscript"},
Filenames: []string{"*.ts", "*.txt"}, Filenames: []string{"*.ts"},
MimeTypes: []string{"text/x-typoscript"}, MimeTypes: []string{"text/x-typoscript"},
DotAll: true, DotAll: true,
Priority: 0.1,
}, },
Rules{ Rules{
"root": { "root": {


@ -15,32 +15,36 @@ var YAML = internal.Register(MustNewLexer(
Rules{ Rules{
"root": { "root": {
Include("whitespace"), Include("whitespace"),
{`^---`, Text, nil}, {`^---`, NameNamespace, nil},
{`^\.\.\.`, NameNamespace, nil},
{`[\n?]?\s*- `, Text, nil}, {`[\n?]?\s*- `, Text, nil},
{`#.*$`, Comment, nil}, {`#.*$`, Comment, nil},
{`!![^\s]+`, CommentPreproc, nil}, {`!![^\s]+`, CommentPreproc, nil},
{`&[^\s]+`, CommentPreproc, nil}, {`&[^\s]+`, CommentPreproc, nil},
{`\*[^\s]+`, CommentPreproc, nil}, {`\*[^\s]+`, CommentPreproc, nil},
{`^%include\s+[^\n\r]+`, CommentPreproc, nil}, {`^%include\s+[^\n\r]+`, CommentPreproc, nil},
{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
Include("key"), Include("key"),
Include("value"), Include("value"),
{`[?:,\[\]]`, Punctuation, nil}, {`[?:,\[\]]`, Punctuation, nil},
{`.`, Text, nil}, {`.`, Text, nil},
}, },
"value": { "value": {
{Words(``, `\b`, "true", "false", "null"), KeywordConstant, nil}, {`([>|](?:[+-])?)(\n(^ {1,})(?:.*\n*(?:^\3 *).*)*)`, ByGroups(Punctuation, StringDoc, Whitespace), nil},
{Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null",
"y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO",
"on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil},
{`"(?:\\.|[^"])*"`, StringDouble, nil}, {`"(?:\\.|[^"])*"`, StringDouble, nil},
{`'(?:\\.|[^'])*'`, StringSingle, nil}, {`'(?:\\.|[^'])*'`, StringSingle, nil},
{`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil}, {`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil},
{`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil}, {`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil},
{`\b[\w]+\b`, Text, nil}, {`([^\{\}\[\]\?,\:\!\-\*&\@].*)( )+(#.*)`, ByGroups(Literal, Whitespace, Comment), nil},
{`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil},
}, },
"key": { "key": {
{`"[^"\n].*": `, Keyword, nil}, {`"[^"\n].*": `, NameTag, nil},
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, Keyword, Punctuation, Whitespace), nil}, {`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)( )`, ByGroups(Keyword, Punctuation, Whitespace), nil}, {`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil},
{`([^"\n{]*)(:)(\n)`, ByGroups(Keyword, Punctuation, Whitespace), nil}, {`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil},
}, },
"whitespace": { "whitespace": {
{`\s+`, Whitespace, nil}, {`\s+`, Whitespace, nil},

67 vendor/github.com/alecthomas/chroma/lexers/y/yang.go generated vendored Normal file

@ -0,0 +1,67 @@
package y
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
var YANG = internal.Register(MustNewLexer(
&Config{
Name: "YANG",
Aliases: []string{"yang"},
Filenames: []string{"*.yang"},
MimeTypes: []string{"application/yang"},
},
Rules{
"root": {
{`\s+`, Whitespace, nil},
{`[\{\}\;]+`, Punctuation, nil},
{`(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])`, Operator, nil},
{`"(?:\\"|[^"])*?"`, StringDouble, nil},
{`'(?:\\'|[^'])*?'`, StringSingle, nil},
{`/\*`, CommentMultiline, Push("comments")},
{`//.*?$`, CommentSingle, nil},
//match BNF stmt for `node-identifier` with [ prefix ":"]
{`(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])`, ByGroups(KeywordNamespace, Punctuation, Text), nil},
//match BNF stmt `date-arg-str`
{`([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s\{\}\;])`, LiteralDate, nil},
{`([0-9]+\.[0-9]+)(?=[\s\{\}\;])`, NumberFloat, nil},
{`([0-9]+)(?=[\s\{\}\;])`, NumberInteger, nil},
//TOP_STMTS_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `module`, `submodule`), Keyword, nil},
//MODULE_HEADER_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `belongs-to`, `namespace`, `prefix`, `yang-version`), Keyword, nil},
//META_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `contact`, `description`, `organization`, `reference`, `revision`), Keyword, nil},
//LINKAGE_STMTS_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `import`, `include`, `revision-date`), Keyword, nil},
//BODY_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `action`, `argument`, `augment`, `deviation`, `extension`, `feature`, `grouping`, `identity`, `if-feature`, `input`, `notification`, `output`, `rpc`, `typedef`), Keyword, nil},
//DATA_DEF_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `anydata`, `anyxml`, `case`, `choice`, `config`, `container`, `deviate`, `leaf`, `leaf-list`, `list`, `must`, `presence`, `refine`, `uses`, `when`), Keyword, nil},
//TYPE_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `base`, `bit`, `default`, `enum`, `error-app-tag`, `error-message`, `fraction-digits`, `length`, `max-elements`, `min-elements`, `modifier`, `ordered-by`, `path`, `pattern`, `position`, `range`, `require-instance`, `status`, `type`, `units`, `value`, `yin-element`), Keyword, nil},
//LIST_STMT_KEYWORDS
{Words(``, `(?=[^\w\-\:])`, `key`, `mandatory`, `unique`), Keyword, nil},
//CONSTANTS_KEYWORDS - RFC7950 other keywords
{Words(``, `(?=[^\w\-\:])`, `add`, `current`, `delete`, `deprecated`, `false`, `invert-match`, `max`, `min`, `not-supported`, `obsolete`, `replace`, `true`, `unbounded`, `user`), NameClass, nil},
//RFC7950 Built-In Types
{Words(``, `(?=[^\w\-\:])`, `binary`, `bits`, `boolean`, `decimal64`, `empty`, `enumeration`, `identityref`, `instance-identifier`, `int16`, `int32`, `int64`, `int8`, `leafref`, `string`, `uint16`, `uint32`, `uint64`, `uint8`, `union`), NameClass, nil},
{`[^;{}\s\'\"]+`, Text, nil},
},
"comments": {
{`[^*/]`, CommentMultiline, nil},
{`/\*`, CommentMultiline, Push("comment")},
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
},
))

54 vendor/github.com/alecthomas/chroma/lexers/z/zig.go generated vendored Normal file

@ -0,0 +1,54 @@
package z
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Zig lexer.
var Zig = internal.Register(MustNewLexer(
&Config{
Name: "Zig",
Aliases: []string{"zig"},
Filenames: []string{"*.zig"},
MimeTypes: []string{"text/zig"},
},
Rules{
"root": {
{`\n`, TextWhitespace, nil},
{`\s+`, TextWhitespace, nil},
{`//.*?\n`, CommentSingle, nil},
{Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil},
{Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil},
{Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil},
{Words(``, `\b`, `while`, `for`), Keyword, nil},
{Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil},
{Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil},
{Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil},
{Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil},
{`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil},
{`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
{`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil},
{`0b(?:_?[01])+`, LiteralNumberBin, nil},
{`0o(?:_?[0-7])+`, LiteralNumberOct, nil},
{`0x(?:_?[0-9a-fA-F])+`, LiteralNumberHex, nil},
{`(?:_?[0-9])+`, LiteralNumberInteger, nil},
{`@[a-zA-Z_]\w*`, NameBuiltin, nil},
{`[a-zA-Z_]\w*`, Name, nil},
{`\'\\\'\'`, LiteralStringEscape, nil},
{`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil},
{`\'[^\\\']\'`, LiteralString, nil},
{`\\\\[^\n]*`, LiteralStringHeredoc, nil},
{`c\\\\[^\n]*`, LiteralStringHeredoc, nil},
{`c?"`, LiteralString, Push("string")},
{`[+%=><|^!?/\-*&~:]`, Operator, nil},
{`[{}()\[\],.;]`, Punctuation, nil},
},
"string": {
{`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil},
{`[^\\"\n]+`, LiteralString, nil},
{`"`, LiteralString, Pop(1)},
},
},
))


@ -6,6 +6,7 @@ import (
"regexp" "regexp"
"strings" "strings"
"sync" "sync"
"time"
"unicode/utf8" "unicode/utf8"
"github.com/dlclark/regexp2" "github.com/dlclark/regexp2"
@ -160,6 +161,14 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
// Rules maps from state to a sequence of Rules. // Rules maps from state to a sequence of Rules.
type Rules map[string][]Rule type Rules map[string][]Rule
// Rename clones the rules, then renames the state `old` to `new`.
func (r Rules) Rename(old, new string) Rules {
r = r.Clone()
r[new] = r[old]
delete(r, old)
return r
}
// Clone returns a clone of the Rules. // Clone returns a clone of the Rules.
func (r Rules) Clone() Rules { func (r Rules) Clone() Rules {
out := map[string][]Rule{} out := map[string][]Rule{}
@ -170,6 +179,15 @@ func (r Rules) Clone() Rules {
return out return out
} }
// Merge creates a clone of "r" then merges "rules" into the clone.
func (r Rules) Merge(rules Rules) Rules {
out := r.Clone()
for k, v := range rules.Clone() {
out[k] = v
}
return out
}
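A hedged sketch of how a lexer definition might use the two helpers added above; the `base` rules below are invented for illustration and are not taken from this diff.

```go
package main

import (
	"fmt"

	. "github.com/alecthomas/chroma" // nolint
)

func main() {
	// Invented base rules, purely for illustration.
	base := Rules{
		"root": {
			{`//.*`, CommentSingle, nil},
			{`\s+`, Text, nil},
		},
	}

	// Rename clones base, then moves the "root" state to "expr".
	derived := base.Rename("root", "expr")

	// Merge clones again and overlays an additional "root" state.
	derived = derived.Merge(Rules{
		"root": {
			{`"[^"]*"`, LiteralString, nil},
		},
	})

	fmt.Println(len(base["root"]), len(derived["expr"]), len(derived["root"])) // 2 2 1
}
```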
// MustNewLexer creates a new Lexer or panics. // MustNewLexer creates a new Lexer or panics.
func MustNewLexer(config *Config, rules Rules) *RegexLexer { func MustNewLexer(config *Config, rules Rules) *RegexLexer {
lexer, err := NewLexer(config, rules) lexer, err := NewLexer(config, rules)
@ -376,6 +394,7 @@ func (r *RegexLexer) maybeCompile() (err error) {
if err != nil { if err != nil {
return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err) return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
} }
rule.Regexp.MatchTimeout = time.Millisecond * 250
} }
} }
} }
@ -410,6 +429,9 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
if options == nil { if options == nil {
options = defaultOptions options = defaultOptions
} }
if options.EnsureLF {
text = ensureLF(text)
}
if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") { if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") {
text += "\n" text += "\n"
} }
@ -437,3 +459,22 @@ func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule
} }
return 0, &CompiledRule{}, nil return 0, &CompiledRule{}, nil
} }
// replace \r and \r\n with \n
// same as strings.ReplaceAll but more efficient
func ensureLF(text string) string {
buf := make([]byte, len(text))
var j int
for i := 0; i < len(text); i++ {
c := text[i]
if c == '\r' {
if i < len(text)-1 && text[i+1] == '\n' {
continue
}
c = '\n'
}
buf[j] = c
j++
}
return string(buf[:j])
}
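A small sketch of the new `EnsureLF` option from a caller's side; the sample source and the choice of the Python lexer are arbitrary.

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	src := "x = 1\r\ny = 2\r\n" // Windows line endings, invented sample

	lexer := lexers.Get("python")
	it, err := lexer.Tokenise(&chroma.TokeniseOptions{
		State:    "root",
		EnsureLF: true, // normalise \r\n and bare \r to \n before lexing
	}, src)
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%s %q\n", tok.Type, tok.Value)
	}
}
```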


@ -0,0 +1,81 @@
package styles
import (
"github.com/alecthomas/chroma"
)
// Base16Snazzy style
var Base16Snazzy = Register(chroma.MustNewStyle("base16-snazzy", chroma.StyleEntries{
chroma.Comment: "#78787e",
chroma.CommentHashbang: "#78787e",
chroma.CommentMultiline: "#78787e",
chroma.CommentPreproc: "#78787e",
chroma.CommentSingle: "#78787e",
chroma.CommentSpecial: "#78787e",
chroma.Generic: "#e2e4e5",
chroma.GenericDeleted: "#ff5c57",
chroma.GenericEmph: "#e2e4e5 underline",
chroma.GenericError: "#ff5c57",
chroma.GenericHeading: "#e2e4e5 bold",
chroma.GenericInserted: "#e2e4e5 bold",
chroma.GenericOutput: "#43454f",
chroma.GenericPrompt: "#e2e4e5",
chroma.GenericStrong: "#e2e4e5 italic",
chroma.GenericSubheading: "#e2e4e5 bold",
chroma.GenericTraceback: "#e2e4e5",
chroma.GenericUnderline: "underline",
chroma.Error: "#ff5c57",
chroma.Keyword: "#ff6ac1",
chroma.KeywordConstant: "#ff6ac1",
chroma.KeywordDeclaration: "#ff5c57",
chroma.KeywordNamespace: "#ff6ac1",
chroma.KeywordPseudo: "#ff6ac1",
chroma.KeywordReserved: "#ff6ac1",
chroma.KeywordType: "#9aedfe",
chroma.Literal: "#e2e4e5",
chroma.LiteralDate: "#e2e4e5",
chroma.Name: "#e2e4e5",
chroma.NameAttribute: "#57c7ff",
chroma.NameBuiltin: "#ff5c57",
chroma.NameBuiltinPseudo: "#e2e4e5",
chroma.NameClass: "#f3f99d",
chroma.NameConstant: "#ff9f43",
chroma.NameDecorator: "#ff9f43",
chroma.NameEntity: "#e2e4e5",
chroma.NameException: "#e2e4e5",
chroma.NameFunction: "#57c7ff",
chroma.NameLabel: "#ff5c57",
chroma.NameNamespace: "#e2e4e5",
chroma.NameOther: "#e2e4e5",
chroma.NameTag: "#ff6ac1",
chroma.NameVariable: "#ff5c57",
chroma.NameVariableClass: "#ff5c57",
chroma.NameVariableGlobal: "#ff5c57",
chroma.NameVariableInstance: "#ff5c57",
chroma.LiteralNumber: "#ff9f43",
chroma.LiteralNumberBin: "#ff9f43",
chroma.LiteralNumberFloat: "#ff9f43",
chroma.LiteralNumberHex: "#ff9f43",
chroma.LiteralNumberInteger: "#ff9f43",
chroma.LiteralNumberIntegerLong: "#ff9f43",
chroma.LiteralNumberOct: "#ff9f43",
chroma.Operator: "#ff6ac1",
chroma.OperatorWord: "#ff6ac1",
chroma.Other: "#e2e4e5",
chroma.Punctuation: "#e2e4e5",
chroma.LiteralString: "#5af78e",
chroma.LiteralStringBacktick: "#5af78e",
chroma.LiteralStringChar: "#5af78e",
chroma.LiteralStringDoc: "#5af78e",
chroma.LiteralStringDouble: "#5af78e",
chroma.LiteralStringEscape: "#5af78e",
chroma.LiteralStringHeredoc: "#5af78e",
chroma.LiteralStringInterpol: "#5af78e",
chroma.LiteralStringOther: "#5af78e",
chroma.LiteralStringRegex: "#5af78e",
chroma.LiteralStringSingle: "#5af78e",
chroma.LiteralStringSymbol: "#5af78e",
chroma.Text: "#e2e4e5",
chroma.TextWhitespace: "#e2e4e5",
chroma.Background: " bg:#282a36",
}))
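For reference, the registered name can be selected like any other style; a hedged sketch using chroma's `quick` helper, with an arbitrary sample source.

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := `fmt.Println("hello")` // arbitrary sample source

	// "base16-snazzy" is the name registered above; "terminal256" is one
	// of chroma's built-in formatters.
	if err := quick.Highlight(os.Stdout, src, "go", "terminal256", "base16-snazzy"); err != nil {
		log.Fatal(err)
	}
}
```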


@ -1,8 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import re
from collections import defaultdict from collections import defaultdict
from subprocess import check_output from subprocess import check_output
lines = check_output(["go", "run", "./cmd/chroma/main.go", "--list"]).decode('utf-8').splitlines() README_FILE = "README.md"
lines = check_output(["go", "run", "./cmd/chroma/main.go", "--list"]).decode("utf-8").splitlines()
lines = [line.strip() for line in lines if line.startswith(" ") and not line.startswith(" ")] lines = [line.strip() for line in lines if line.startswith(" ") and not line.startswith(" ")]
lines = sorted(lines, key=lambda l: l.lower()) lines = sorted(lines, key=lambda l: l.lower())
@ -11,5 +15,18 @@ table = defaultdict(list)
for line in lines: for line in lines:
table[line[0].upper()].append(line) table[line[0].upper()].append(line)
rows = []
for key, value in table.items(): for key, value in table.items():
print("{} | {}".format(key, ', '.join(value))) rows.append("{} | {}".format(key, ", ".join(value)))
tbody = "\n".join(rows)
with open(README_FILE, "r") as f:
content = f.read()
with open(README_FILE, "w") as f:
marker = re.compile(r"(?P<start>:----: \\| --------\n).*?(?P<end>\n\n)", re.DOTALL)
replacement = r"\g<start>%s\g<end>" % tbody
updated_content = marker.sub(replacement, content)
f.write(updated_content)
print(tbody)

Binary file not shown.


@ -23,3 +23,5 @@ _testmain.go
*.test *.test
*.prof *.prof
*.out *.out
.DS_Store


@ -1,5 +1,5 @@
language: go language: go
go: go:
- 1.5 - 1.9
- tip - tip


@ -43,8 +43,8 @@ The __last__ capture is embedded in each group, so `g.String()` will return the
| Category | regexp | regexp2 | | Category | regexp | regexp2 |
| --- | --- | --- | | --- | --- | --- |
| Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field | | Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field |
| Python-style capture groups `(P<name>re)` | yes | no | | Python-style capture groups `(?P<name>re)` | yes | no (yes in RE2 compat mode) |
| .NET-style capture groups `(<name>re)` or `('name're)` | no | yes | | .NET-style capture groups `(?<name>re)` or `(?'name're)` | no | yes |
| comments `(?#comment)` | no | yes | | comments `(?#comment)` | no | yes |
| branch numbering reset `(?\|a\|b)` | no | no | | branch numbering reset `(?\|a\|b)` | no | no |
| possessive match `(?>re)` | no | yes | | possessive match `(?>re)` | no | yes |
@ -54,8 +54,24 @@ The __last__ capture is embedded in each group, so `g.String()` will return the
| negative lookbehind `(?<!re)` | no | yes | | negative lookbehind `(?<!re)` | no | yes |
| back reference `\1` | no | yes | | back reference `\1` | no | yes |
| named back reference `\k'name'` | no | yes | | named back reference `\k'name'` | no | yes |
| named ascii character class `[[:foo:]]`| yes | no | | named ascii character class `[[:foo:]]`| yes | no (yes in RE2 compat mode) |
| conditionals `((expr)yes\|no)` | no | yes | | conditionals `(?(expr)yes\|no)` | no | yes |
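As a concrete illustration of the rows above, a small sketch (not from this diff) using a lookbehind and a back reference, which `regexp2` accepts but the standard `regexp` package rejects:

```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	// Lookbehind and back references are regexp2-only per the table above.
	re := regexp2.MustCompile(`(?<=\$)(\w+)\s+\1`, regexp2.None)
	if m, _ := re.FindStringMatch("$foo foo bar"); m != nil {
		fmt.Println("matched:", m.String()) // matched: foo foo
	}
}
```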
## RE2 compatibility mode
The default behavior of `regexp2` is to match the .NET regexp engine; however, the `RE2` option is provided to change the parsing and increase compatibility with RE2. Using the `RE2` option when compiling a regexp will not take away any features, but will change the following behaviors:
* add support for named ascii character classes (e.g. `[[:foo:]]`)
* add support for python-style capture groups (e.g. `(?P<name>re)`)
* change singleline behavior for `$` to only match end of string (like RE2) (see [#24](https://github.com/dlclark/regexp2/issues/24))
```go
re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2)
if isMatch, _ := re.MatchString(`Something to match`); isMatch {
//do something
}
```
This feature is a work in progress and I'm open to ideas for more things to put here (maybe more relaxed character escaping rules?).
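A further hedged sketch of the two parsing changes listed above (named ASCII classes and python-style groups) under the `RE2` option:

```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	// Accepted only in RE2 compatibility mode: a named ASCII class and a
	// python-style named capture group.
	re := regexp2.MustCompile(`(?P<word>[[:alpha:]]+)`, regexp2.RE2)
	if m, _ := re.FindStringMatch("abc 123"); m != nil {
		fmt.Println(m.GroupByName("word").String()) // abc
	}
}
```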
## Library features that I'm still working on ## Library features that I'm still working on
- Regex split - Regex split


@@ -120,6 +120,7 @@ const (
     RightToLeft = 0x0040 // "r"
     Debug       = 0x0080 // "d"
     ECMAScript  = 0x0100 // "e"
+    RE2         = 0x0200 // RE2 (regexp package) compatibility mode
 )
 
 func (re *Regexp) RightToLeft() bool {
@@ -234,17 +235,14 @@ func (re *Regexp) getRunesAndStart(s string, startAt int) ([]rune, int) {
         ret[i] = r
         i++
     }
+    if startAt == len(s) {
+        runeIdx = i
+    }
     return ret[:i], runeIdx
 }
 
 func getRunes(s string) []rune {
-    ret := make([]rune, len(s))
-    i := 0
-    for _, r := range s {
-        ret[i] = r
-        i++
-    }
-    return ret[:i]
+    return []rune(s)
 }
 
 // MatchRunes return true if the runes matches the regex


@@ -566,9 +566,22 @@ func (r *runner) execute() error {
             continue
 
         case syntax.EndZ:
-            if r.rightchars() > 1 || r.rightchars() == 1 && r.charAt(r.textPos()) != '\n' {
+            rchars := r.rightchars()
+            if rchars > 1 {
                 break
             }
+            // RE2 and EcmaScript define $ as "asserts position at the end of the string"
+            // PCRE/.NET adds "or before the line terminator right at the end of the string (if any)"
+            if (r.re.options & (RE2 | ECMAScript)) != 0 {
+                // RE2/Ecmascript mode
+                if rchars > 0 {
+                    break
+                }
+            } else if rchars == 1 && r.charAt(r.textPos()) != '\n' {
+                // "regular" mode
+                break
+            }
 
             r.advance(0)
             continue
@@ -938,8 +951,8 @@ func (r *runner) advance(i int) {
 }
 
 func (r *runner) goTo(newpos int) {
-    // when branching backward, ensure storage
-    if newpos < r.codepos {
+    // when branching backward or in place, ensure storage
+    if newpos <= r.codepos {
         r.ensureStorage()
     }
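The `EndZ` change above is the behavioral half of the README note on `$`: under `RE2` (or `ECMAScript`) it anchors only at the absolute end of the input, while the default .NET/PCRE-style mode also accepts the position just before a trailing newline. A rough sketch of the difference (the inputs and the expected-output comment are mine, not output from this diff):

```go
package main

import (
    "fmt"

    "github.com/dlclark/regexp2"
)

func main() {
    input := "last line\n" // note the trailing newline

    // Default mode: $ also matches just before a final line terminator.
    def := regexp2.MustCompile(`line$`, regexp2.None)
    // RE2 mode: $ asserts the end of the string only.
    re2 := regexp2.MustCompile(`line$`, regexp2.RE2)

    m1, _ := def.MatchString(input)
    m2, _ := re2.MatchString(input)
    fmt.Println(m1, m2) // expected: true false
}
```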


@@ -484,6 +484,29 @@ func (c *CharSet) addRanges(ranges []singleRange) {
     c.canonicalize()
 }
 
+// Merges everything but the new ranges into our own
+func (c *CharSet) addNegativeRanges(ranges []singleRange) {
+    if c.anything {
+        return
+    }
+
+    var hi rune
+
+    // convert incoming ranges into opposites, assume they are in order
+    for _, r := range ranges {
+        if hi < r.first {
+            c.ranges = append(c.ranges, singleRange{hi, r.first - 1})
+        }
+        hi = r.last + 1
+    }
+
+    if hi < utf8.MaxRune {
+        c.ranges = append(c.ranges, singleRange{hi, utf8.MaxRune})
+    }
+
+    c.canonicalize()
+}
+
 func isValidUnicodeCat(catName string) bool {
     _, ok := unicodeCategories[catName]
     return ok
@@ -515,6 +538,53 @@ func (c *CharSet) addRange(chMin, chMax rune) {
     c.canonicalize()
 }
 
+func (c *CharSet) addNamedASCII(name string, negate bool) bool {
+    var rs []singleRange
+
+    switch name {
+    case "alnum":
+        rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'Z'}, singleRange{'a', 'z'}}
+    case "alpha":
+        rs = []singleRange{singleRange{'A', 'Z'}, singleRange{'a', 'z'}}
+    case "ascii":
+        rs = []singleRange{singleRange{0, 0x7f}}
+    case "blank":
+        rs = []singleRange{singleRange{'\t', '\t'}, singleRange{' ', ' '}}
+    case "cntrl":
+        rs = []singleRange{singleRange{0, 0x1f}, singleRange{0x7f, 0x7f}}
+    case "digit":
+        c.addDigit(false, negate, "")
+    case "graph":
+        rs = []singleRange{singleRange{'!', '~'}}
+    case "lower":
+        rs = []singleRange{singleRange{'a', 'z'}}
+    case "print":
+        rs = []singleRange{singleRange{' ', '~'}}
+    case "punct": //[!-/:-@[-`{-~]
+        rs = []singleRange{singleRange{'!', '/'}, singleRange{':', '@'}, singleRange{'[', '`'}, singleRange{'{', '~'}}
+    case "space":
+        c.addSpace(true, negate)
+    case "upper":
+        rs = []singleRange{singleRange{'A', 'Z'}}
+    case "word":
+        c.addWord(true, negate)
+    case "xdigit":
+        rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'F'}, singleRange{'a', 'f'}}
+    default:
+        return false
+    }
+
+    if len(rs) > 0 {
+        if negate {
+            c.addNegativeRanges(rs)
+        } else {
+            c.addRanges(rs)
+        }
+    }
+
+    return true
+}
+
 type singleRangeSorter []singleRange
 
 func (p singleRangeSorter) Len() int { return len(p) }
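`addNamedASCII` above is what backs the `[[:foo:]]` classes listed in the README table when the `RE2` option is set, including the negated `[[:^foo:]]` form handled in the parser changes below. A small sketch of how that looks from the caller's side (the pattern and input are illustrative):

```go
package main

import (
    "fmt"

    "github.com/dlclark/regexp2"
)

func main() {
    // [[:alpha:]] and the negated [[:^space:]] forms are only parsed as named
    // ASCII classes when the RE2 option is supplied.
    re := regexp2.MustCompile(`[[:alpha:]]+-[[:^space:]]+`, regexp2.RE2)

    if ok, _ := re.MatchString("cheat-4.2.0"); ok {
        fmt.Println("matched")
    }
}
```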


@@ -21,6 +21,7 @@ const (
     RightToLeft = 0x0040 // "r"
     Debug       = 0x0080 // "d"
     ECMAScript  = 0x0100 // "e"
+    RE2         = 0x0200 // RE2 compat mode
 )
 
 func optionFromCode(ch rune) RegexOptions {
@@ -310,7 +311,7 @@ func (p *parser) countCaptures() error {
         switch ch {
         case '\\':
             if p.charsRight() > 0 {
-                p.moveRight(1)
+                p.scanBackslash(true)
             }
 
         case '#':
@@ -354,6 +355,14 @@ func (p *parser) countCaptures() error {
                                 p.noteCaptureName(p.scanCapname(), pos)
                             }
                         }
+                    } else if p.useRE2() && p.charsRight() > 2 && (p.rightChar(0) == 'P' && p.rightChar(1) == '<') {
+                        // RE2-compat (?P<)
+                        p.moveRight(2)
+                        ch = p.rightChar(0)
+                        if IsWordChar(ch) {
+                            p.noteCaptureName(p.scanCapname(), pos)
+                        }
                     } else {
                         // (?...
@@ -520,7 +529,7 @@ func (p *parser) scanRegex() (*regexNode, error) {
         }
 
     case '\\':
-        n, err := p.scanBackslash()
+        n, err := p.scanBackslash(false)
         if err != nil {
             return nil, err
         }
@@ -1022,6 +1031,50 @@ func (p *parser) scanGroupOpen() (*regexNode, error) {
                 }
             }
 
+        case 'P':
+            if p.useRE2() {
+                // support for P<name> syntax
+                if p.charsRight() < 3 {
+                    goto BreakRecognize
+                }
+
+                ch = p.moveRightGetChar()
+                if ch != '<' {
+                    goto BreakRecognize
+                }
+
+                ch = p.moveRightGetChar()
+                p.moveLeft()
+
+                if IsWordChar(ch) {
+                    capnum := -1
+                    capname := p.scanCapname()
+
+                    if p.isCaptureName(capname) {
+                        capnum = p.captureSlotFromName(capname)
+                    }
+
+                    // check if we have bogus character after the name
+                    if p.charsRight() > 0 && p.rightChar(0) != '>' {
+                        return nil, p.getErr(ErrInvalidGroupName)
+                    }
+
+                    // actually make the node
+                    if capnum != -1 && p.charsRight() > 0 && p.moveRightGetChar() == '>' {
+                        return newRegexNodeMN(ntCapture, p.options, capnum, -1), nil
+                    }
+                    goto BreakRecognize
+                } else {
+                    // bad group name - starts with something other than a word character and isn't a number
+                    return nil, p.getErr(ErrInvalidGroupName)
+                }
+            }
+            // if we're not using RE2 compat mode then
+            // we just behave like normal
+            fallthrough
+
         default:
             p.moveLeft()
@@ -1055,7 +1108,7 @@ BreakRecognize:
 }
 
 // scans backslash specials and basics
-func (p *parser) scanBackslash() (*regexNode, error) {
+func (p *parser) scanBackslash(scanOnly bool) (*regexNode, error) {
 
     if p.charsRight() == 0 {
         return nil, p.getErr(ErrIllegalEndEscape)
@@ -1123,12 +1176,12 @@ func (p *parser) scanBackslash() (*regexNode, error) {
         return newRegexNodeSet(ntSet, p.options, cc), nil
 
     default:
-        return p.scanBasicBackslash()
+        return p.scanBasicBackslash(scanOnly)
     }
 }
 
 // Scans \-style backreferences and character escapes
-func (p *parser) scanBasicBackslash() (*regexNode, error) {
+func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
     if p.charsRight() == 0 {
         return nil, p.getErr(ErrIllegalEndEscape)
     }
@@ -1184,19 +1237,23 @@ func (p *parser) scanBasicBackslash() (*regexNode, error) {
         if p.charsRight() > 0 && p.moveRightGetChar() == close {
             if p.isCaptureSlot(capnum) {
                 return newRegexNodeM(ntRef, p.options, capnum), nil
-            } else {
-                return nil, p.getErr(ErrUndefinedBackRef, capnum)
             }
+            return nil, p.getErr(ErrUndefinedBackRef, capnum)
         }
     } else if !angled && ch >= '1' && ch <= '9' { // Try to parse backreference or octal: \1
         capnum, err := p.scanDecimal()
         if err != nil {
             return nil, err
         }
-        if p.useOptionE() || p.isCaptureSlot(capnum) {
+
+        if scanOnly {
+            return nil, nil
+        }
+
+        if p.isCaptureSlot(capnum) {
             return newRegexNodeM(ntRef, p.options, capnum), nil
         }
-        if capnum <= 9 {
+
+        if capnum <= 9 && !p.useOptionE() {
             return nil, p.getErr(ErrUndefinedBackRef, capnum)
         }
@@ -1448,11 +1505,26 @@ func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) {
                     savePos := p.textpos()
 
                     p.moveRight(1)
-                    p.scanCapname() // throwaway the name
+                    negate := false
+                    if p.charsRight() > 1 && p.rightChar(0) == '^' {
+                        negate = true
+                        p.moveRight(1)
+                    }
+
+                    nm := p.scanCapname() // snag the name
+                    if !scanOnly && p.useRE2() {
+                        // look up the name since these are valid for RE2
+                        // add the group based on the name
+                        if ok := cc.addNamedASCII(nm, negate); !ok {
+                            return nil, p.getErr(ErrInvalidCharRange)
+                        }
+                    }
+
                     if p.charsRight() < 2 || p.moveRightGetChar() != ':' || p.moveRightGetChar() != ']' {
                         p.textto(savePos)
+                    } else if p.useRE2() {
+                        // move on
+                        continue
                     }
-                    // else lookup name (nyi)
                 }
             }
@@ -1547,7 +1619,7 @@ func (p *parser) scanDecimal() (int, error) {
 
 // Returns true for options allowed only at the top level
 func isOnlyTopOption(option RegexOptions) bool {
-    return option == RightToLeft || option == ECMAScript
+    return option == RightToLeft || option == ECMAScript || option == RE2
 }
 
 // Scans cimsx-cimsx option string, stops at the first unrecognized char.
@@ -1576,7 +1648,7 @@ func (p *parser) scanOptions() {
 }
 
 // Scans \ code for escape codes that map to single unicode chars.
-func (p *parser) scanCharEscape() (rune, error) {
+func (p *parser) scanCharEscape() (r rune, err error) {
 
     ch := p.moveRightGetChar()
@@ -1585,16 +1657,22 @@ func (p *parser) scanCharEscape() (rune, error) {
         return p.scanOctal(), nil
     }
 
+    pos := p.textpos()
+
     switch ch {
     case 'x':
         // support for \x{HEX} syntax from Perl and PCRE
         if p.charsRight() > 0 && p.rightChar(0) == '{' {
+            if p.useOptionE() {
+                return ch, nil
+            }
             p.moveRight(1)
             return p.scanHexUntilBrace()
+        } else {
+            r, err = p.scanHex(2)
         }
-        return p.scanHex(2)
     case 'u':
-        return p.scanHex(4)
+        r, err = p.scanHex(4)
     case 'a':
         return '\u0007', nil
     case 'b':
@@ -1612,13 +1690,18 @@ func (p *parser) scanCharEscape() (rune, error) {
     case 'v':
         return '\u000B', nil
     case 'c':
-        return p.scanControl()
+        r, err = p.scanControl()
     default:
         if !p.useOptionE() && IsWordChar(ch) {
             return 0, p.getErr(ErrUnrecognizedEscape, string(ch))
         }
         return ch, nil
     }
+    if err != nil && p.useOptionE() {
+        p.textto(pos)
+        return ch, nil
+    }
+    return
 }
 
 // Grabs and converts an ascii control character
@@ -1735,12 +1818,12 @@ func (p *parser) scanOctal() rune {
     //we know the first char is good because the caller had to check
     i := 0
     d := int(p.rightChar(0) - '0')
-    for c > 0 && d <= 7 {
-        i *= 8
-        i += d
-        if p.useOptionE() && i >= 0x20 {
+    for c > 0 && d <= 7 && d >= 0 {
+        if i >= 0x20 && p.useOptionE() {
             break
         }
+        i *= 8
+        i += d
         c--
         p.moveRight(1)
@@ -1861,6 +1944,11 @@ func (p *parser) useOptionE() bool {
     return (p.options & ECMAScript) != 0
 }
 
+// true to use RE2 compatibility parsing behavior.
+func (p *parser) useRE2() bool {
+    return (p.options & RE2) != 0
+}
+
 // True if options stack is empty.
 func (p *parser) emptyOptionsStack() bool {
     return len(p.optionsStack) == 0
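The `scanBasicBackslash` hunks above adjust how back references are resolved: in the default mode a reference to a group that does not exist remains a compile-time error, while scan-only passes and ECMAScript mode take the new early exits. A small sketch of how that surfaces to callers (the patterns are illustrative):

```go
package main

import (
    "fmt"

    "github.com/dlclark/regexp2"
)

func main() {
    // A back reference to an existing group compiles and matches a repeated word.
    re := regexp2.MustCompile(`(\w+)\s+\1`, regexp2.None)
    ok, _ := re.MatchString("hello hello")
    fmt.Println(ok) // true

    // With only one capture group defined, \2 is an undefined back reference,
    // so compilation fails outside ECMAScript mode.
    if _, err := regexp2.Compile(`(\w+)\s+\2`, regexp2.None); err != nil {
        fmt.Println("compile error:", err)
    }
}
```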

Some files were not shown because too many files have changed in this diff.