mirror of
https://github.com/cheat/cheat.git
synced 2025-09-01 09:38:29 +02:00
Compare commits
83 Commits
Author | SHA1 | Date | |
---|---|---|---|
4250b854c9 | |||
cb0243e7fc | |||
e5d04d41ea | |||
2474ea4fb1 | |||
7467c9fbc0 | |||
dfba3da003 | |||
ad7ad64a75 | |||
c4dcfd5da0 | |||
278a5d9154 | |||
9fa0c466fd | |||
4e9b2928b3 | |||
fa5eb44be8 | |||
49afd7c16b | |||
59d5c96c24 | |||
8e602b0e93 | |||
fb04cb1fcd | |||
d42726101e | |||
93b3a711f5 | |||
9c3d41c8bd | |||
4eeec6c868 | |||
1b17ab1914 | |||
477650ee44 | |||
c4dd3b52fd | |||
e8a0ea0dc3 | |||
992ee66a56 | |||
c9840c2d6f | |||
bd53768f67 | |||
8092687956 | |||
16ade50672 | |||
62c80d76eb | |||
3e67eaa3b7 | |||
38b13655fe | |||
749d5c1182 | |||
521f83377c | |||
b15ff10537 | |||
5288bd0c1c | |||
bddbee4158 | |||
ce27cf2cc0 | |||
5733b1d6d4 | |||
2d221050d8 | |||
ce37b670c7 | |||
47a9eeb4fd | |||
be56c9cf0c | |||
7be57cb01c | |||
8453af8601 | |||
6e388c3693 | |||
b13246978a | |||
a39d36cd34 | |||
87cba04ff2 | |||
bc623da74b | |||
a6c25d4b9c | |||
e24ac2b385 | |||
e0c35a74d4 | |||
3e4c1818a9 | |||
7b4a268ebd | |||
f7183aa17a | |||
1ce6c29e6a | |||
219db679e1 | |||
53177cb09d | |||
ef7a41f9a9 | |||
008316d030 | |||
a59c019642 | |||
57225442be | |||
2c7ce48859 | |||
a3fe4f40bb | |||
506fb8be15 | |||
408e944eea | |||
8a313b92ca | |||
6912771c39 | |||
d4c6200702 | |||
9251849d23 | |||
313b5ebd27 | |||
ca91b25b02 | |||
bbf6af50b1 | |||
9f05442bce | |||
3fc4c2f89e | |||
9e88ff2642 | |||
e3764b81e7 | |||
3786ac96a5 | |||
4cb7a3b42c | |||
ff6a866abe | |||
2e7ccb2a68 | |||
126231db1f |
17
.github/workflows/homebrew.yml
vendored
Normal file
17
.github/workflows/homebrew.yml
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
name: homebrew
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: '*'
|
||||
|
||||
jobs:
|
||||
homebrew:
|
||||
name: Bump Homebrew formula
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: mislav/bump-homebrew-formula-action@v1
|
||||
with:
|
||||
# A PR will be sent to github.com/Homebrew/homebrew-core to update this formula:
|
||||
formula-name: cheat
|
||||
env:
|
||||
COMMITTER_TOKEN: ${{ secrets.COMMITTER_TOKEN }}
|
13
.travis.yml
13
.travis.yml
@ -1,4 +1,15 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.13.x
|
||||
- 1.14.x
|
||||
|
||||
os:
|
||||
- linux
|
||||
- osx
|
||||
|
||||
env:
|
||||
- GO111MODULE=on
|
||||
|
||||
install: true
|
||||
|
||||
script: make ci
|
||||
|
168
Makefile
168
Makefile
@ -1,8 +1,7 @@
|
||||
# paths
|
||||
makefile := $(realpath $(lastword $(MAKEFILE_LIST)))
|
||||
root_dir := $(shell dirname $(makefile))
|
||||
cmd_dir := $(root_dir)/cmd/cheat
|
||||
dist_dir := $(root_dir)/dist
|
||||
cmd_dir := ./cmd/cheat
|
||||
dist_dir := ./dist
|
||||
|
||||
# executables
|
||||
CAT := cat
|
||||
@ -10,120 +9,163 @@ COLUMN := column
|
||||
CTAGS := ctags
|
||||
GO := go
|
||||
GREP := grep
|
||||
GZIP := gzip --best
|
||||
LINT := revive
|
||||
MAN := man
|
||||
MKDIR := mkdir -p
|
||||
PANDOC := pandoc
|
||||
RM := rm
|
||||
SCC := scc
|
||||
SED := sed
|
||||
SORT := sort
|
||||
ZIP := zip -m
|
||||
|
||||
# build flags
|
||||
BUILD_FLAGS := -ldflags="-s -w" -mod vendor
|
||||
BUILD_FLAGS := -ldflags="-s -w" -mod vendor -trimpath
|
||||
GOBIN :=
|
||||
TMPDIR := /tmp
|
||||
|
||||
# NB: this is a kludge to specify the desired build targets. This information
|
||||
# would "naturally" be best structured as an array of structs, but lacking that
|
||||
# capability, we're condensing that information into strings which we will
|
||||
# later split.
|
||||
#
|
||||
# Format: <architecture>/<os>/<arm-version>/<executable-name>
|
||||
.PHONY: $(RELEASES)
|
||||
RELEASES := \
|
||||
amd64/darwin/0/cheat-darwin-amd64 \
|
||||
amd64/linux/0/cheat-linux-amd64 \
|
||||
amd64/windows/0/cheat-windows-amd64.exe \
|
||||
arm/linux/5/cheat-linux-arm5 \
|
||||
arm/linux/6/cheat-linux-arm6 \
|
||||
arm/linux/7/cheat-linux-arm7
|
||||
# release binaries
|
||||
releases := \
|
||||
$(dist_dir)/cheat-darwin-amd64 \
|
||||
$(dist_dir)/cheat-linux-386 \
|
||||
$(dist_dir)/cheat-linux-amd64 \
|
||||
$(dist_dir)/cheat-linux-arm5 \
|
||||
$(dist_dir)/cheat-linux-arm6 \
|
||||
$(dist_dir)/cheat-linux-arm7 \
|
||||
$(dist_dir)/cheat-windows-amd64.exe
|
||||
|
||||
# macros to unpack the above
|
||||
parts = $(subst /, ,$@)
|
||||
arch = $(word 1, $(parts))
|
||||
os = $(word 2, $(parts))
|
||||
arm = $(word 3, $(parts))
|
||||
bin = $(word 4, $(parts))
|
||||
|
||||
|
||||
## build: builds an executable for your architecture
|
||||
## build: build an executable for your architecture
|
||||
.PHONY: build
|
||||
build: clean generate
|
||||
build: $(dist_dir) clean vendor generate man
|
||||
$(GO) build $(BUILD_FLAGS) -o $(dist_dir)/cheat $(cmd_dir)
|
||||
|
||||
## build-release: builds release executables
|
||||
## build-release: build release executables
|
||||
.PHONY: build-release
|
||||
build-release: $(RELEASES)
|
||||
build-release: $(releases)
|
||||
|
||||
## ci: build a "release" executable for the current architecture (used in ci)
|
||||
.PHONY: ci
|
||||
ci: | setup prepare build
|
||||
|
||||
# cheat-darwin-amd64
|
||||
$(dist_dir)/cheat-darwin-amd64: prepare
|
||||
GOARCH=amd64 GOOS=darwin \
|
||||
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
|
||||
|
||||
# cheat-linux-386
|
||||
$(dist_dir)/cheat-linux-386: prepare
|
||||
GOARCH=386 GOOS=linux \
|
||||
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
|
||||
|
||||
# cheat-linux-amd64
|
||||
$(dist_dir)/cheat-linux-amd64: prepare
|
||||
GOARCH=amd64 GOOS=linux \
|
||||
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
|
||||
|
||||
# cheat-linux-arm5
|
||||
$(dist_dir)/cheat-linux-arm5: prepare
|
||||
GOARCH=arm GOOS=linux GOARM=5 \
|
||||
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
|
||||
|
||||
# cheat-linux-arm6
|
||||
$(dist_dir)/cheat-linux-arm6: prepare
|
||||
GOARCH=arm GOOS=linux GOARM=6 \
|
||||
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
|
||||
|
||||
# cheat-linux-arm7
|
||||
$(dist_dir)/cheat-linux-arm7: prepare
|
||||
GOARCH=arm GOOS=linux GOARM=7 \
|
||||
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
|
||||
|
||||
# cheat-windows-amd64
|
||||
$(dist_dir)/cheat-windows-amd64.exe: prepare
|
||||
GOARCH=amd64 GOOS=windows \
|
||||
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(ZIP) $@.zip $@
|
||||
|
||||
# ./dist
|
||||
$(dist_dir):
|
||||
$(MKDIR) $(dist_dir)
|
||||
|
||||
.PHONY: generate
|
||||
generate:
|
||||
$(GO) generate $(cmd_dir)
|
||||
|
||||
.PHONY: $(RELEASES)
|
||||
$(RELEASES): clean generate check
|
||||
ifeq ($(arch),arm)
|
||||
GOARCH=$(arch) GOOS=$(os) GOARM=$(arm) $(GO) build $(BUILD_FLAGS) -o $(dist_dir)/$(bin) $(cmd_dir)
|
||||
else
|
||||
GOARCH=$(arch) GOOS=$(os) $(GO) build $(BUILD_FLAGS) -o $(dist_dir)/$(bin) $(cmd_dir)
|
||||
endif
|
||||
|
||||
## install: builds and installs cheat on your PATH
|
||||
## install: build and install cheat on your PATH
|
||||
.PHONY: install
|
||||
install:
|
||||
install: build
|
||||
$(GO) install $(BUILD_FLAGS) $(GOBIN) $(cmd_dir)
|
||||
|
||||
$(dist_dir):
|
||||
$(MKDIR) $(dist_dir)
|
||||
|
||||
## clean: removes compiled executables
|
||||
## clean: remove compiled executables
|
||||
.PHONY: clean
|
||||
clean: $(dist_dir)
|
||||
$(RM) -f $(dist_dir)/*
|
||||
|
||||
## distclean: removes the tags file
|
||||
## distclean: remove the tags file
|
||||
.PHONY: distclean
|
||||
distclean:
|
||||
$(RM) $(root_dir)/tags
|
||||
$(RM) -f tags
|
||||
|
||||
## setup: installs revive (linter) and scc (sloc tool)
|
||||
## setup: install revive (linter) and scc (sloc tool)
|
||||
.PHONY: setup
|
||||
setup:
|
||||
GO111MODULE=off $(GO) get -u github.com/boyter/scc github.com/mgechev/revive
|
||||
|
||||
## sloc: counts "semantic lines of code"
|
||||
## sloc: count "semantic lines of code"
|
||||
.PHONY: sloc
|
||||
sloc:
|
||||
$(SCC) --exclude-dir=vendor
|
||||
|
||||
## tags: builds a tags file
|
||||
## tags: build a tags file
|
||||
.PHONY: tags
|
||||
tags:
|
||||
$(CTAGS) -R $(root_dir) --exclude=$(root_dir)/vendor
|
||||
$(CTAGS) -R --exclude=vendor --languages=go
|
||||
|
||||
## vendor: downloads, tidies, and verifies dependencies
|
||||
## man: build a man page
|
||||
# NB: pandoc may not be installed, so we're ignoring this error on failure
|
||||
.PHONY: man
|
||||
man:
|
||||
-$(PANDOC) -s -t man doc/cheat.1.md -o doc/cheat.1
|
||||
|
||||
## vendor: download, tidy, and verify dependencies
|
||||
.PHONY: vendor
|
||||
vendor: lint # kludge: revive appears to complain if the vendor directory disappears while a lint is running
|
||||
vendor:
|
||||
$(GO) mod vendor && $(GO) mod tidy && $(GO) mod verify
|
||||
|
||||
## fmt: runs go fmt
|
||||
## fmt: run go fmt
|
||||
.PHONY: fmt
|
||||
fmt:
|
||||
$(GO) fmt $(root_dir)/...
|
||||
$(GO) fmt ./...
|
||||
|
||||
## lint: lints go source files
|
||||
## lint: lint go source files
|
||||
.PHONY: lint
|
||||
lint:
|
||||
$(LINT) -exclude $(root_dir)/vendor/... $(root_dir)/...
|
||||
$(GO) vet $(root_dir)/...
|
||||
lint: vendor
|
||||
$(LINT) -exclude vendor/... ./...
|
||||
|
||||
## test: runs unit-tests
|
||||
## vet: vet go source files
|
||||
.PHONY: vet
|
||||
vet:
|
||||
$(GO) vet ./...
|
||||
|
||||
## test: run unit-tests
|
||||
.PHONY: test
|
||||
test:
|
||||
$(GO) test $(root_dir)/...
|
||||
$(GO) test ./...
|
||||
|
||||
## check: formats, lints, vendors, and run unit-tests
|
||||
## coverage: generate a test coverage report
|
||||
.PHONY: coverage
|
||||
coverage:
|
||||
$(GO) test ./... -coverprofile=$(TMPDIR)/cheat-coverage.out && \
|
||||
$(GO) tool cover -html=$(TMPDIR)/cheat-coverage.out
|
||||
|
||||
## check: format, lint, vet, vendor, and run unit-tests
|
||||
.PHONY: check
|
||||
check: fmt lint vendor test
|
||||
check: | vendor fmt lint vet test
|
||||
|
||||
## help: displays this help text
|
||||
.PHONY: prepare
|
||||
prepare: | $(dist_dir) clean generate vendor fmt lint vet test
|
||||
|
||||
## help: display this help text
|
||||
.PHONY: help
|
||||
help:
|
||||
@$(CAT) $(makefile) | \
|
||||
|
22
README.md
22
README.md
@ -51,7 +51,9 @@ Installing
|
||||
Configuring
|
||||
-----------
|
||||
### conf.yml ###
|
||||
`cheat` is configured by a YAML file that can be generated with `cheat --init`:
|
||||
`cheat` is configured by a YAML file that will be auto-generated on first run.
|
||||
Should you need to create a config file manually, you can do
|
||||
so via:
|
||||
|
||||
```sh
|
||||
mkdir -p ~/.config/cheat && cheat --init > ~/.config/cheat/conf.yml
|
||||
@ -89,13 +91,14 @@ const squares = [1, 2, 3, 4].map(x => x * x);
|
||||
```
|
||||
|
||||
The `cheat` executable includes no cheatsheets, but [community-sourced
|
||||
cheatsheets are available][cheatsheets].
|
||||
cheatsheets are available][cheatsheets]. You will be asked if you would like to
|
||||
install the community-sourced cheatsheets the first time you run `cheat`.
|
||||
|
||||
|
||||
Cheatpaths
|
||||
----------
|
||||
Cheatsheets are stored on "cheatpaths", which are directories that contain
|
||||
cheetsheets. Cheatpaths are specified in the `conf.yml` file.
|
||||
cheatsheets. Cheatpaths are specified in the `conf.yml` file.
|
||||
|
||||
It can be useful to configure `cheat` against multiple cheatpaths. A common
|
||||
pattern is to store cheatsheets from multiple repositories on individual
|
||||
@ -193,11 +196,18 @@ cheat -p personal -t networking --regex -s '(?:[0-9]{1,3}\.){3}[0-9]{1,3}'
|
||||
|
||||
Advanced Usage
|
||||
--------------
|
||||
`cheat` may be integrated with [fzf][]. See [fzf.bash][bash] for instructions.
|
||||
(Support for other shells will be added in future releases.)
|
||||
Shell autocompletion is currently available for `bash`, `fish`, and `zsh`. Copy
|
||||
the relevant [completion script][completions] into the appropriate directory on
|
||||
your filesystem to enable autocompletion. (This directory will vary depending
|
||||
on operating system and shell specifics.)
|
||||
|
||||
Additionally, `cheat` supports enhanced autocompletion via integration with
|
||||
[fzf][]. To enable `fzf` integration:
|
||||
|
||||
1. Ensure that `fzf` is available on your `$PATH`
|
||||
2. Set an envvar: `export CHEAT_USE_FZF=true`
|
||||
|
||||
[Releases]: https://github.com/cheat/cheat/releases
|
||||
[bash]: https://github.com/cheat/cheat/blob/master/scripts/fzf.bash
|
||||
[cheatsheets]: https://github.com/cheat/cheatsheets
|
||||
[completions]: https://github.com/cheat/cheat/tree/master/scripts
|
||||
[fzf]: https://github.com/junegunn/fzf
|
||||
|
@ -1,15 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# TODO: this script has been made obsolete by the Makefile, yet downstream
|
||||
# package managers plausibly rely on it for compiling locally. Remove this file
|
||||
# after downstream maintainers have had time to modify their packages to simply
|
||||
# invoke `make` in the project root.
|
||||
|
||||
# locate the cheat project root
|
||||
BINDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
APPDIR=$(readlink -f "$BINDIR/..")
|
||||
|
||||
# compile the executable
|
||||
cd $APPDIR
|
||||
|
||||
make
|
@ -1,18 +1,20 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"text/tabwriter"
|
||||
|
||||
"github.com/cheat/cheat/internal/config"
|
||||
"github.com/cheat/cheat/internal/display"
|
||||
)
|
||||
|
||||
// cmdDirectories lists the configured cheatpaths.
|
||||
func cmdDirectories(opts map[string]interface{}, conf config.Config) {
|
||||
|
||||
// initialize a tabwriter to produce cleanly columnized output
|
||||
w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
|
||||
var out bytes.Buffer
|
||||
w := tabwriter.NewWriter(&out, 0, 0, 1, ' ', 0)
|
||||
|
||||
// generate sorted, columnized output
|
||||
for _, path := range conf.Cheatpaths {
|
||||
@ -25,4 +27,5 @@ func cmdDirectories(opts map[string]interface{}, conf config.Config) {
|
||||
|
||||
// write columnized output to stdout
|
||||
w.Flush()
|
||||
display.Display(out.String(), conf)
|
||||
}
|
||||
|
@ -99,8 +99,15 @@ func cmdEdit(opts map[string]interface{}, conf config.Config) {
|
||||
}
|
||||
}
|
||||
|
||||
// split `conf.Editor` into parts to separate the editor's executable from
|
||||
// any arguments it may have been passed. If this is not done, the nearby
|
||||
// call to `exec.Command` will fail.
|
||||
parts := strings.Fields(conf.Editor)
|
||||
editor := parts[0]
|
||||
args := append(parts[1:], editpath)
|
||||
|
||||
// edit the cheatsheet
|
||||
cmd := exec.Command(conf.Editor, editpath)
|
||||
cmd := exec.Command(editor, args...)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stderr = os.Stderr
|
||||
|
@ -2,9 +2,56 @@ package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"runtime"
|
||||
"strings"
|
||||
|
||||
"github.com/mitchellh/go-homedir"
|
||||
|
||||
"github.com/cheat/cheat/internal/config"
|
||||
)
|
||||
|
||||
// cmdInit displays an example config file.
|
||||
func cmdInit() {
|
||||
fmt.Println(configs())
|
||||
|
||||
// get the user's home directory
|
||||
home, err := homedir.Dir()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to get user home directory: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// read the envvars into a map of strings
|
||||
envvars := map[string]string{}
|
||||
for _, e := range os.Environ() {
|
||||
pair := strings.SplitN(e, "=", 2)
|
||||
envvars[pair[0]] = pair[1]
|
||||
}
|
||||
|
||||
// load the config template
|
||||
configs := configs()
|
||||
|
||||
// identify the os-specifc paths at which configs may be located
|
||||
confpaths, err := config.Paths(runtime.GOOS, home, envvars)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to read config paths: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// determine the appropriate paths for config data and (optional) community
|
||||
// cheatsheets based on the user's platform
|
||||
confpath := confpaths[0]
|
||||
confdir := path.Dir(confpath)
|
||||
|
||||
// create paths for community and personal cheatsheets
|
||||
community := path.Join(confdir, "/cheatsheets/community")
|
||||
personal := path.Join(confdir, "/cheatsheets/personal")
|
||||
|
||||
// template the above paths into the default configs
|
||||
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
|
||||
configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
|
||||
|
||||
// output the templated configs
|
||||
fmt.Println(configs)
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
@ -9,6 +10,7 @@ import (
|
||||
"text/tabwriter"
|
||||
|
||||
"github.com/cheat/cheat/internal/config"
|
||||
"github.com/cheat/cheat/internal/display"
|
||||
"github.com/cheat/cheat/internal/sheet"
|
||||
"github.com/cheat/cheat/internal/sheets"
|
||||
)
|
||||
@ -79,16 +81,19 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
|
||||
flattened = filtered
|
||||
}
|
||||
|
||||
// exit early if no cheatsheets are available
|
||||
// return exit code 2 if no cheatsheets are available
|
||||
if len(flattened) == 0 {
|
||||
os.Exit(0)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
// initialize a tabwriter to produce cleanly columnized output
|
||||
w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
|
||||
var out bytes.Buffer
|
||||
w := tabwriter.NewWriter(&out, 0, 0, 1, ' ', 0)
|
||||
|
||||
// write a header row
|
||||
fmt.Fprintln(w, "title:\tfile:\ttags:")
|
||||
|
||||
// generate sorted, columnized output
|
||||
fmt.Fprintln(w, "title:\tfile:\ttags:")
|
||||
for _, sheet := range flattened {
|
||||
fmt.Fprintln(w, fmt.Sprintf(
|
||||
"%s\t%s\t%s",
|
||||
@ -100,4 +105,5 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
|
||||
|
||||
// write columnized output to stdout
|
||||
w.Flush()
|
||||
display.Display(out.String(), conf)
|
||||
}
|
||||
|
@ -37,8 +37,8 @@ func cmdRemove(opts map[string]interface{}, conf config.Config) {
|
||||
// fail early if the requested cheatsheet does not exist
|
||||
sheet, ok := consolidated[cheatsheet]
|
||||
if !ok {
|
||||
fmt.Fprintln(os.Stderr, fmt.Sprintf("no cheatsheet found for '%s'.\n", cheatsheet))
|
||||
os.Exit(1)
|
||||
fmt.Fprintln(os.Stderr, fmt.Sprintf("No cheatsheet found for '%s'.\n", cheatsheet))
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
// fail early if the sheet is read-only
|
||||
|
@ -7,6 +7,8 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/cheat/cheat/internal/config"
|
||||
"github.com/cheat/cheat/internal/display"
|
||||
"github.com/cheat/cheat/internal/sheet"
|
||||
"github.com/cheat/cheat/internal/sheets"
|
||||
)
|
||||
|
||||
@ -35,7 +37,25 @@ func cmdSearch(opts map[string]interface{}, conf config.Config) {
|
||||
// local cheatsheets)
|
||||
consolidated := sheets.Consolidate(cheatsheets)
|
||||
|
||||
// if <cheatsheet> was provided, search that single sheet only
|
||||
if opts["<cheatsheet>"] != nil {
|
||||
|
||||
cheatsheet := opts["<cheatsheet>"].(string)
|
||||
|
||||
// assert that the cheatsheet exists
|
||||
s, ok := consolidated[cheatsheet]
|
||||
if !ok {
|
||||
fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
consolidated = map[string]sheet.Sheet{
|
||||
cheatsheet: s,
|
||||
}
|
||||
}
|
||||
|
||||
// sort the cheatsheets alphabetically, and search for matches
|
||||
out := ""
|
||||
for _, sheet := range sheets.Sort(consolidated) {
|
||||
|
||||
// assume that we want to perform a case-insensitive search for <phrase>
|
||||
@ -53,17 +73,30 @@ func cmdSearch(opts map[string]interface{}, conf config.Config) {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// search the sheet
|
||||
matches := sheet.Search(reg, conf.Color(opts))
|
||||
// `Search` will return text entries that match the search terms. We're
|
||||
// using it here to overwrite the prior cheatsheet Text, filtering it to
|
||||
// only what is relevant
|
||||
sheet.Text = sheet.Search(reg)
|
||||
|
||||
// display the results
|
||||
if len(matches) > 0 {
|
||||
fmt.Printf("%s:\n", sheet.Title)
|
||||
for _, m := range matches {
|
||||
fmt.Printf(" %d: %s\n", m.Line, m.Text)
|
||||
}
|
||||
fmt.Print("\n")
|
||||
// if the sheet did not match the search, ignore it and move on
|
||||
if sheet.Text == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// if colorization was requested, apply it here
|
||||
if conf.Color(opts) {
|
||||
sheet.Colorize(conf)
|
||||
}
|
||||
|
||||
// output the cheatsheet title
|
||||
out += fmt.Sprintf("%s:\n", sheet.Title)
|
||||
|
||||
// indent each line of content with two spaces
|
||||
for _, line := range strings.Split(sheet.Text, "\n") {
|
||||
out += fmt.Sprintf(" %s\n", line)
|
||||
}
|
||||
}
|
||||
|
||||
// display the output
|
||||
display.Display(out, conf)
|
||||
}
|
||||
|
@ -5,6 +5,7 @@ import (
|
||||
"os"
|
||||
|
||||
"github.com/cheat/cheat/internal/config"
|
||||
"github.com/cheat/cheat/internal/display"
|
||||
"github.com/cheat/cheat/internal/sheets"
|
||||
)
|
||||
|
||||
@ -18,8 +19,12 @@ func cmdTags(opts map[string]interface{}, conf config.Config) {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// write sheet tags to stdout
|
||||
// assemble the output
|
||||
out := ""
|
||||
for _, tag := range sheets.Tags(cheatsheets) {
|
||||
fmt.Println(tag)
|
||||
out += fmt.Sprintln(tag)
|
||||
}
|
||||
|
||||
// display the output
|
||||
display.Display(out, conf)
|
||||
}
|
||||
|
@ -5,9 +5,8 @@ import (
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/alecthomas/chroma/quick"
|
||||
|
||||
"github.com/cheat/cheat/internal/config"
|
||||
"github.com/cheat/cheat/internal/display"
|
||||
"github.com/cheat/cheat/internal/sheets"
|
||||
)
|
||||
|
||||
@ -40,32 +39,14 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
|
||||
sheet, ok := consolidated[cheatsheet]
|
||||
if !ok {
|
||||
fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
|
||||
os.Exit(0)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
if !conf.Color(opts) {
|
||||
fmt.Print(sheet.Text)
|
||||
os.Exit(0)
|
||||
// apply colorization if requested
|
||||
if conf.Color(opts) {
|
||||
sheet.Colorize(conf)
|
||||
}
|
||||
|
||||
// otherwise, colorize the output
|
||||
// if the syntax was not specified, default to bash
|
||||
lex := sheet.Syntax
|
||||
if lex == "" {
|
||||
lex = "bash"
|
||||
}
|
||||
|
||||
// apply syntax highlighting
|
||||
err = quick.Highlight(
|
||||
os.Stdout,
|
||||
sheet.Text,
|
||||
lex,
|
||||
conf.Formatter,
|
||||
conf.Style,
|
||||
)
|
||||
|
||||
// if colorization somehow failed, output non-colorized text
|
||||
if err != nil {
|
||||
fmt.Print(sheet.Text)
|
||||
}
|
||||
// display the cheatsheet
|
||||
display.Display(sheet.Text, conf)
|
||||
}
|
||||
|
@ -2,18 +2,18 @@ Usage:
|
||||
cheat [options] [<cheatsheet>]
|
||||
|
||||
Options:
|
||||
--init Write a default config file to stdout
|
||||
-c --colorize Colorize output
|
||||
-d --directories List cheatsheet directories
|
||||
-e --edit=<sheet> Edit <sheet>
|
||||
-l --list List cheatsheets
|
||||
-p --path=<name> Return only sheets found on path <name>
|
||||
-r --regex Treat search <phrase> as a regex
|
||||
-s --search=<phrase> Search cheatsheets for <phrase>
|
||||
-t --tag=<tag> Return only sheets matching <tag>
|
||||
-T --tags List all tags in use
|
||||
-v --version Print the version number
|
||||
--rm=<sheet> Remove (delete) <sheet>
|
||||
--init Write a default config file to stdout
|
||||
-c --colorize Colorize output
|
||||
-d --directories List cheatsheet directories
|
||||
-e --edit=<cheatsheet> Edit <cheatsheet>
|
||||
-l --list List cheatsheets
|
||||
-p --path=<name> Return only sheets found on path <name>
|
||||
-r --regex Treat search <phrase> as a regex
|
||||
-s --search=<phrase> Search cheatsheets for <phrase>
|
||||
-t --tag=<tag> Return only sheets matching <tag>
|
||||
-T --tags List all tags in use
|
||||
-v --version Print the version number
|
||||
--rm=<cheatsheet> Remove (delete) <cheatsheet>
|
||||
|
||||
Examples:
|
||||
|
||||
|
@ -5,15 +5,19 @@ package main
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"runtime"
|
||||
"strings"
|
||||
|
||||
"github.com/docopt/docopt-go"
|
||||
"github.com/mitchellh/go-homedir"
|
||||
|
||||
"github.com/cheat/cheat/internal/cheatpath"
|
||||
"github.com/cheat/cheat/internal/config"
|
||||
"github.com/cheat/cheat/internal/installer"
|
||||
)
|
||||
|
||||
const version = "3.3.0"
|
||||
const version = "4.0.4"
|
||||
|
||||
func main() {
|
||||
|
||||
@ -31,13 +35,106 @@ func main() {
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// load the config file
|
||||
confpath, err := config.Path(runtime.GOOS)
|
||||
// get the user's home directory
|
||||
home, err := homedir.Dir()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, "could not locate config file")
|
||||
fmt.Fprintf(os.Stderr, "failed to get user home directory: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// read the envvars into a map of strings
|
||||
envvars := map[string]string{}
|
||||
for _, e := range os.Environ() {
|
||||
pair := strings.SplitN(e, "=", 2)
|
||||
envvars[pair[0]] = pair[1]
|
||||
}
|
||||
|
||||
// identify the os-specifc paths at which configs may be located
|
||||
confpaths, err := config.Paths(runtime.GOOS, home, envvars)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to load config: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// search for the config file in the above paths
|
||||
confpath, err := config.Path(confpaths)
|
||||
if err != nil {
|
||||
// prompt the user to create a config file
|
||||
yes, err := installer.Prompt(
|
||||
"A config file was not found. Would you like to create one now? [Y/n]",
|
||||
true,
|
||||
)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// exit early on a negative answer
|
||||
if !yes {
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// read the config template
|
||||
configs := configs()
|
||||
|
||||
// determine the appropriate paths for config data and (optional) community
|
||||
// cheatsheets based on the user's platform
|
||||
confpath = confpaths[0]
|
||||
confdir := path.Dir(confpath)
|
||||
|
||||
// create paths for community and personal cheatsheets
|
||||
community := path.Join(confdir, "/cheatsheets/community")
|
||||
personal := path.Join(confdir, "/cheatsheets/personal")
|
||||
|
||||
// template the above paths into the default configs
|
||||
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
|
||||
configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
|
||||
|
||||
// prompt the user to download the community cheatsheets
|
||||
yes, err = installer.Prompt(
|
||||
"Would you like to download the community cheatsheets? [Y/n]",
|
||||
true,
|
||||
)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// clone the community cheatsheets if so instructed
|
||||
if yes {
|
||||
// clone the community cheatsheets
|
||||
if err := installer.Clone(community); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// also create a directory for personal cheatsheets
|
||||
if err := os.MkdirAll(personal, os.ModePerm); err != nil {
|
||||
fmt.Fprintf(
|
||||
os.Stderr,
|
||||
"failed to create config: failed to create directory: %s: %v\n",
|
||||
personal,
|
||||
err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// the config file does not exist, so we'll try to create one
|
||||
if err = config.Init(confpath, configs); err != nil {
|
||||
fmt.Fprintf(
|
||||
os.Stderr,
|
||||
"failed to create config file: %s: %v\n",
|
||||
confpath,
|
||||
err,
|
||||
)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
fmt.Printf("Created config file: %s\n", confpath)
|
||||
fmt.Println("Please read this file for advanced configuration information.")
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// initialize the configs
|
||||
conf, err := config.New(opts, confpath, true)
|
||||
if err != nil {
|
||||
|
@ -23,6 +23,9 @@ style: monokai
|
||||
# One of: "terminal", "terminal256", "terminal16m"
|
||||
formatter: terminal16m
|
||||
|
||||
# Through which pager should output be piped? (Unset this key for no pager.)
|
||||
pager: less -FRX
|
||||
|
||||
# The paths at which cheatsheets are available. Tags associated with a cheatpath
|
||||
# are automatically attached to all cheatsheets residing on that path.
|
||||
#
|
||||
@ -41,32 +44,34 @@ cheatpaths:
|
||||
# thus be overridden by more local cheatsheets. That being the case, you
|
||||
# should probably list community cheatsheets first.
|
||||
#
|
||||
# Note that the paths and tags listed below are just examples. You may freely
|
||||
# Note that the paths and tags listed below are placeholders. You may freely
|
||||
# change them to suit your needs.
|
||||
#
|
||||
# Community cheatsheets must be installed separately, though you may have
|
||||
# downloaded them automatically when installing 'cheat'. If not, you may
|
||||
# download them here:
|
||||
#
|
||||
# https://github.com/cheat/cheatsheets
|
||||
#
|
||||
# Once downloaded, ensure that 'path' below points to the location at which
|
||||
# you downloaded the community cheatsheets.
|
||||
- name: community
|
||||
path: ~/.dotfiles/cheat/community
|
||||
path: COMMUNITY_PATH
|
||||
tags: [ community ]
|
||||
readonly: true
|
||||
|
||||
# Maybe your company or department maintains a repository of cheatsheets as
|
||||
# well. It's probably sensible to list those second.
|
||||
- name: work
|
||||
path: ~/.dotfiles/cheat/work
|
||||
tags: [ work ]
|
||||
readonly: false
|
||||
|
||||
# If you have personalized cheatsheets, list them last. They will take
|
||||
# precedence over the more global cheatsheets.
|
||||
- name: personal
|
||||
path: ~/.dotfiles/cheat/personal
|
||||
path: PERSONAL_PATH
|
||||
tags: [ personal ]
|
||||
readonly: false
|
||||
|
||||
# While it requires no specific configuration here, it's also worth noting
|
||||
# that 'cheat' will automatically append directories named '.cheat' within
|
||||
# the current working directory to the 'cheatpath'. This can be very useful
|
||||
# if you'd like to closely associate cheatsheets with, for example, a
|
||||
# directory containing source code.
|
||||
# While it requires no configuration here, it's also worth noting that
|
||||
# 'cheat' will automatically append directories named '.cheat' within the
|
||||
# current working directory to the 'cheatpath'. This can be very useful if
|
||||
# you'd like to closely associate cheatsheets with, for example, a directory
|
||||
# containing source code.
|
||||
#
|
||||
# Such "directory-scoped" cheatsheets will be treated as the most "local"
|
||||
# cheatsheets, and will override less "local" cheatsheets. Likewise,
|
||||
|
@ -11,18 +11,18 @@ func usage() string {
|
||||
cheat [options] [<cheatsheet>]
|
||||
|
||||
Options:
|
||||
--init Write a default config file to stdout
|
||||
-c --colorize Colorize output
|
||||
-d --directories List cheatsheet directories
|
||||
-e --edit=<sheet> Edit <sheet>
|
||||
-l --list List cheatsheets
|
||||
-p --path=<name> Return only sheets found on path <name>
|
||||
-r --regex Treat search <phrase> as a regex
|
||||
-s --search=<phrase> Search cheatsheets for <phrase>
|
||||
-t --tag=<tag> Return only sheets matching <tag>
|
||||
-T --tags List all tags in use
|
||||
-v --version Print the version number
|
||||
--rm=<sheet> Remove (delete) <sheet>
|
||||
--init Write a default config file to stdout
|
||||
-c --colorize Colorize output
|
||||
-d --directories List cheatsheet directories
|
||||
-e --edit=<cheatsheet> Edit <cheatsheet>
|
||||
-l --list List cheatsheets
|
||||
-p --path=<name> Return only sheets found on path <name>
|
||||
-r --regex Treat search <phrase> as a regex
|
||||
-s --search=<phrase> Search cheatsheets for <phrase>
|
||||
-t --tag=<tag> Return only sheets matching <tag>
|
||||
-T --tags List all tags in use
|
||||
-v --version Print the version number
|
||||
--rm=<cheatsheet> Remove (delete) <cheatsheet>
|
||||
|
||||
Examples:
|
||||
|
||||
|
@ -14,6 +14,9 @@ style: monokai
|
||||
# One of: "terminal", "terminal256", "terminal16m"
|
||||
formatter: terminal16m
|
||||
|
||||
# Through which pager should output be piped? (Unset this key for no pager.)
|
||||
pager: less -FRX
|
||||
|
||||
# The paths at which cheatsheets are available. Tags associated with a cheatpath
|
||||
# are automatically attached to all cheatsheets residing on that path.
|
||||
#
|
||||
@ -32,32 +35,34 @@ cheatpaths:
|
||||
# thus be overridden by more local cheatsheets. That being the case, you
|
||||
# should probably list community cheatsheets first.
|
||||
#
|
||||
# Note that the paths and tags listed below are just examples. You may freely
|
||||
# Note that the paths and tags listed below are placeholders. You may freely
|
||||
# change them to suit your needs.
|
||||
#
|
||||
# Community cheatsheets must be installed separately, though you may have
|
||||
# downloaded them automatically when installing 'cheat'. If not, you may
|
||||
# download them here:
|
||||
#
|
||||
# https://github.com/cheat/cheatsheets
|
||||
#
|
||||
# Once downloaded, ensure that 'path' below points to the location at which
|
||||
# you downloaded the community cheatsheets.
|
||||
- name: community
|
||||
path: ~/.dotfiles/cheat/community
|
||||
path: COMMUNITY_PATH
|
||||
tags: [ community ]
|
||||
readonly: true
|
||||
|
||||
# Maybe your company or department maintains a repository of cheatsheets as
|
||||
# well. It's probably sensible to list those second.
|
||||
- name: work
|
||||
path: ~/.dotfiles/cheat/work
|
||||
tags: [ work ]
|
||||
readonly: false
|
||||
|
||||
# If you have personalized cheatsheets, list them last. They will take
|
||||
# precedence over the more global cheatsheets.
|
||||
- name: personal
|
||||
path: ~/.dotfiles/cheat/personal
|
||||
path: PERSONAL_PATH
|
||||
tags: [ personal ]
|
||||
readonly: false
|
||||
|
||||
# While it requires no specific configuration here, it's also worth noting
|
||||
# that 'cheat' will automatically append directories named '.cheat' within
|
||||
# the current working directory to the 'cheatpath'. This can be very useful
|
||||
# if you'd like to closely associate cheatsheets with, for example, a
|
||||
# directory containing source code.
|
||||
# While it requires no configuration here, it's also worth noting that
|
||||
# 'cheat' will automatically append directories named '.cheat' within the
|
||||
# current working directory to the 'cheatpath'. This can be very useful if
|
||||
# you'd like to closely associate cheatsheets with, for example, a directory
|
||||
# containing source code.
|
||||
#
|
||||
# Such "directory-scoped" cheatsheets will be treated as the most "local"
|
||||
# cheatsheets, and will override less "local" cheatsheets. Likewise,
|
||||
|
221
doc/cheat.1
Normal file
221
doc/cheat.1
Normal file
@ -0,0 +1,221 @@
|
||||
.\" Automatically generated by Pandoc 1.17.2
|
||||
.\"
|
||||
.TH "CHEAT" "1" "" "" "General Commands Manual"
|
||||
.hy
|
||||
.SH NAME
|
||||
.PP
|
||||
\f[B]cheat\f[] \[em] create and view command\-line cheatsheets
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\f[B]cheat\f[] [options] [\f[I]CHEATSHEET\f[]]
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
\f[B]cheat\f[] allows you to create and view interactive cheatsheets on
|
||||
the command\-line.
|
||||
It was designed to help remind *nix system administrators of options for
|
||||
commands that they use frequently, but not frequently enough to
|
||||
remember.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B \-\-init
|
||||
Print a config file to stdout.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-c, \-\-colorize
|
||||
Colorize output.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-d, \-\-directories
|
||||
List cheatsheet directories.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-e, \-\-edit=\f[I]CHEATSHEET\f[]
|
||||
Open \f[I]CHEATSHEET\f[] for editing.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-l, \-\-list
|
||||
List available cheatsheets.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-p, \-\-path=\f[I]PATH\f[]
|
||||
Filter only to sheets found on path \f[I]PATH\f[].
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-r, \-\-regex
|
||||
Treat search \f[I]PHRASE\f[] as a regular expression.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-s, \-\-search=\f[I]PHRASE\f[]
|
||||
Search cheatsheets for \f[I]PHRASE\f[].
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-t, \-\-tag=\f[I]TAG\f[]
|
||||
Filter only to sheets tagged with \f[I]TAG\f[].
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-T, \-\-tags
|
||||
List all tags in use.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-v, \-\-version
|
||||
Print the version number.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-rm=\f[I]CHEATSHEET\f[]
|
||||
Remove (deletes) \f[I]CHEATSHEET\f[].
|
||||
.RS
|
||||
.RE
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
.B To view the foo cheatsheet:
|
||||
cheat \f[I]foo\f[]
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To edit (or create) the foo cheatsheet:
|
||||
cheat \-e \f[I]foo\f[]
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To edit (or create) the foo/bar cheatsheet on the \[aq]work\[aq] cheatpath:
|
||||
cheat \-p \f[I]work\f[] \-e \f[I]foo/bar\f[]
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To view all cheatsheet directories:
|
||||
cheat \-d
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To list all available cheatsheets:
|
||||
cheat \-l
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To list all cheatsheets whose titles match \[aq]apt\[aq]:
|
||||
cheat \-l \f[I]apt\f[]
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To list all tags in use:
|
||||
cheat \-T
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To list available cheatsheets that are tagged as \[aq]personal\[aq]:
|
||||
cheat \-l \-t \f[I]personal\f[]
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To search for \[aq]ssh\[aq] among all cheatsheets, and colorize matches:
|
||||
cheat \-c \-s \f[I]ssh\f[]
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To search (by regex) for cheatsheets that contain an IP address:
|
||||
cheat \-c \-r \-s \f[I]\[aq](?:[0\-9]{1,3}.){3}[0\-9]{1,3}\[aq]\f[]
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B To remove (delete) the foo/bar cheatsheet:
|
||||
cheat \-\-rm \f[I]foo/bar\f[]
|
||||
.RS
|
||||
.RE
|
||||
.SH FILES
|
||||
.SS Configuration
|
||||
.PP
|
||||
\f[B]cheat\f[] is configured via a YAML file that is conventionally
|
||||
named \f[I]conf.yaml\f[].
|
||||
\f[B]cheat\f[] will search for \f[I]conf.yaml\f[] in varying locations,
|
||||
depending upon your platform:
|
||||
.SS Linux, OSX, and other Unixes
|
||||
.IP "1." 3
|
||||
\f[B]CHEAT_CONFIG_PATH\f[]
|
||||
.IP "2." 3
|
||||
\f[B]XDG_CONFIG_HOME\f[]/cheat/conf.yaml
|
||||
.IP "3." 3
|
||||
\f[B]$HOME\f[]/.config/cheat/conf.yml
|
||||
.IP "4." 3
|
||||
\f[B]$HOME\f[]/.cheat/conf.yml
|
||||
.SS Windows
|
||||
.IP "1." 3
|
||||
\f[B]CHEAT_CONFIG_PATH\f[]
|
||||
.IP "2." 3
|
||||
\f[B]APPDATA\f[]/cheat/conf.yml
|
||||
.IP "3." 3
|
||||
\f[B]PROGRAMDATA\f[]/cheat/conf.yml
|
||||
.PP
|
||||
\f[B]cheat\f[] will search in the order specified above.
|
||||
The first \f[I]conf.yaml\f[] encountered will be respected.
|
||||
.PP
|
||||
If \f[B]cheat\f[] cannot locate a config file, it will ask if you\[aq]d
|
||||
like to generate one automatically.
|
||||
Alternatively, you may also generate a config file manually by running
|
||||
\f[B]cheat \-\-init\f[] and saving its output to the appropriate
|
||||
location for your platform.
|
||||
.SS Cheatpaths
|
||||
.PP
|
||||
\f[B]cheat\f[] reads its cheatsheets from "cheatpaths", which are the
|
||||
directories in which cheatsheets are stored.
|
||||
Cheatpaths may be configured in \f[I]conf.yaml\f[], and viewed via
|
||||
\f[B]cheat \-d\f[].
|
||||
.PP
|
||||
For detailed instructions on how to configure cheatpaths, please refer
|
||||
to the comments in conf.yml.
|
||||
.SS Autocompletion
|
||||
.PP
|
||||
Autocompletion scripts for \f[B]bash\f[], \f[B]zsh\f[], and
|
||||
\f[B]fish\f[] are available for download:
|
||||
.IP \[bu] 2
|
||||
<https://github.com/cheat/cheat/blob/master/scripts/cheat.bash>
|
||||
.IP \[bu] 2
|
||||
<https://github.com/cheat/cheat/blob/master/scripts/cheat.fish>
|
||||
.IP \[bu] 2
|
||||
<https://github.com/cheat/cheat/blob/master/scripts/cheat.zsh>
|
||||
.PP
|
||||
The \f[B]bash\f[] and \f[B]zsh\f[] scripts provide optional integration
|
||||
with \f[B]fzf\f[], if the latter is available on your \f[B]PATH\f[].
|
||||
.PP
|
||||
The installation process will vary per system and shell configuration,
|
||||
and thus will not be discussed here.
|
||||
.SH ENVIRONMENT
|
||||
.TP
|
||||
.B \f[B]CHEAT_CONFIG_PATH\f[]
|
||||
The path at which the config file is available.
|
||||
If \f[B]CHEAT_CONFIG_PATH\f[] is set, all other config paths will be
|
||||
ignored.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \f[B]CHEAT_USE_FZF\f[]
|
||||
If set, autocompletion scripts will attempt to integrate with
|
||||
\f[B]fzf\f[].
|
||||
.RS
|
||||
.RE
|
||||
.SH RETURN VALUES
|
||||
.IP "0." 3
|
||||
Successful termination
|
||||
.IP "1." 3
|
||||
Application error
|
||||
.IP "2." 3
|
||||
Cheatsheet(s) not found
|
||||
.SH BUGS
|
||||
.PP
|
||||
See GitHub issues: <https://github.com/cheat/cheat/issues>
|
||||
.SH AUTHOR
|
||||
.PP
|
||||
Christopher Allen Lane <chris@chris-allen-lane.com>
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
\f[B]fzf(1)\f[]
|
192
doc/cheat.1.md
Normal file
192
doc/cheat.1.md
Normal file
@ -0,0 +1,192 @@
|
||||
% CHEAT(1) | General Commands Manual
|
||||
|
||||
NAME
|
||||
====
|
||||
|
||||
**cheat** — create and view command-line cheatsheets
|
||||
|
||||
SYNOPSIS
|
||||
========
|
||||
|
||||
| **cheat** \[options] \[_CHEATSHEET_]
|
||||
|
||||
DESCRIPTION
|
||||
===========
|
||||
**cheat** allows you to create and view interactive cheatsheets on the
|
||||
command-line. It was designed to help remind \*nix system administrators of
|
||||
options for commands that they use frequently, but not frequently enough to
|
||||
remember.
|
||||
|
||||
OPTIONS
|
||||
=======
|
||||
|
||||
--init
|
||||
: Print a config file to stdout.
|
||||
|
||||
-c, --colorize
|
||||
: Colorize output.
|
||||
|
||||
-d, --directories
|
||||
: List cheatsheet directories.
|
||||
|
||||
-e, --edit=_CHEATSHEET_
|
||||
: Open _CHEATSHEET_ for editing.
|
||||
|
||||
-l, --list
|
||||
: List available cheatsheets.
|
||||
|
||||
-p, --path=_PATH_
|
||||
: Filter only to sheets found on path _PATH_.
|
||||
|
||||
-r, --regex
|
||||
: Treat search _PHRASE_ as a regular expression.
|
||||
|
||||
-s, --search=_PHRASE_
|
||||
: Search cheatsheets for _PHRASE_.
|
||||
|
||||
-t, --tag=_TAG_
|
||||
: Filter only to sheets tagged with _TAG_.
|
||||
|
||||
-T, --tags
|
||||
: List all tags in use.
|
||||
|
||||
-v, --version
|
||||
: Print the version number.
|
||||
|
||||
--rm=_CHEATSHEET_
|
||||
: Remove (deletes) _CHEATSHEET_.
|
||||
|
||||
|
||||
EXAMPLES
|
||||
========
|
||||
|
||||
To view the foo cheatsheet:
|
||||
: cheat _foo_
|
||||
|
||||
To edit (or create) the foo cheatsheet:
|
||||
: cheat -e _foo_
|
||||
|
||||
To edit (or create) the foo/bar cheatsheet on the 'work' cheatpath:
|
||||
: cheat -p _work_ -e _foo/bar_
|
||||
|
||||
To view all cheatsheet directories:
|
||||
: cheat -d
|
||||
|
||||
To list all available cheatsheets:
|
||||
: cheat -l
|
||||
|
||||
To list all cheatsheets whose titles match 'apt':
|
||||
: cheat -l _apt_
|
||||
|
||||
To list all tags in use:
|
||||
: cheat -T
|
||||
|
||||
To list available cheatsheets that are tagged as 'personal':
|
||||
: cheat -l -t _personal_
|
||||
|
||||
To search for 'ssh' among all cheatsheets, and colorize matches:
|
||||
: cheat -c -s _ssh_
|
||||
|
||||
To search (by regex) for cheatsheets that contain an IP address:
|
||||
: cheat -c -r -s _'(?:[0-9]{1,3}\.){3}[0-9]{1,3}'_
|
||||
|
||||
To remove (delete) the foo/bar cheatsheet:
|
||||
: cheat --rm _foo/bar_
|
||||
|
||||
|
||||
FILES
|
||||
=====
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
**cheat** is configured via a YAML file that is conventionally named
|
||||
_conf.yaml_. **cheat** will search for _conf.yaml_ in varying locations,
|
||||
depending upon your platform:
|
||||
|
||||
### Linux, OSX, and other Unixes ###
|
||||
|
||||
1. **CHEAT_CONFIG_PATH**
|
||||
2. **XDG_CONFIG_HOME**/cheat/conf.yaml
|
||||
3. **$HOME**/.config/cheat/conf.yml
|
||||
4. **$HOME**/.cheat/conf.yml
|
||||
|
||||
### Windows ###
|
||||
|
||||
1. **CHEAT_CONFIG_PATH**
|
||||
2. **APPDATA**/cheat/conf.yml
|
||||
3. **PROGRAMDATA**/cheat/conf.yml
|
||||
|
||||
**cheat** will search in the order specified above. The first _conf.yaml_
|
||||
encountered will be respected.
|
||||
|
||||
If **cheat** cannot locate a config file, it will ask if you'd like to generate
|
||||
one automatically. Alternatively, you may also generate a config file manually
|
||||
by running **cheat --init** and saving its output to the appropriate location
|
||||
for your platform.
|
||||
|
||||
|
||||
Cheatpaths
|
||||
----------
|
||||
**cheat** reads its cheatsheets from "cheatpaths", which are the directories in
|
||||
which cheatsheets are stored. Cheatpaths may be configured in _conf.yaml_, and
|
||||
viewed via **cheat -d**.
|
||||
|
||||
For detailed instructions on how to configure cheatpaths, please refer to the
|
||||
comments in conf.yml.
|
||||
|
||||
|
||||
Autocompletion
|
||||
--------------
|
||||
Autocompletion scripts for **bash**, **zsh**, and **fish** are available for
|
||||
download:
|
||||
|
||||
- <https://github.com/cheat/cheat/blob/master/scripts/cheat.bash>
|
||||
- <https://github.com/cheat/cheat/blob/master/scripts/cheat.fish>
|
||||
- <https://github.com/cheat/cheat/blob/master/scripts/cheat.zsh>
|
||||
|
||||
The **bash** and **zsh** scripts provide optional integration with **fzf**, if
|
||||
the latter is available on your **PATH**.
|
||||
|
||||
The installation process will vary per system and shell configuration, and thus
|
||||
will not be discussed here.
|
||||
|
||||
|
||||
ENVIRONMENT
|
||||
===========
|
||||
|
||||
**CHEAT_CONFIG_PATH**
|
||||
|
||||
: The path at which the config file is available. If **CHEAT_CONFIG_PATH** is
|
||||
set, all other config paths will be ignored.
|
||||
|
||||
**CHEAT_USE_FZF**
|
||||
|
||||
: If set, autocompletion scripts will attempt to integrate with **fzf**.
|
||||
|
||||
RETURN VALUES
|
||||
=============
|
||||
|
||||
0. Successful termination
|
||||
|
||||
1. Application error
|
||||
|
||||
2. Cheatsheet(s) not found
|
||||
|
||||
|
||||
BUGS
|
||||
====
|
||||
|
||||
See GitHub issues: <https://github.com/cheat/cheat/issues>
|
||||
|
||||
|
||||
AUTHOR
|
||||
======
|
||||
|
||||
Christopher Allen Lane <chris@chris-allen-lane.com>
|
||||
|
||||
|
||||
SEE ALSO
|
||||
========
|
||||
|
||||
**fzf(1)**
|
||||
|
13
go.mod
13
go.mod
@ -1,14 +1,17 @@
|
||||
module github.com/cheat/cheat
|
||||
|
||||
go 1.13
|
||||
go 1.14
|
||||
|
||||
require (
|
||||
github.com/alecthomas/chroma v0.7.1
|
||||
github.com/alecthomas/chroma v0.8.0
|
||||
github.com/davecgh/go-spew v1.1.1
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815
|
||||
github.com/mattn/go-isatty v0.0.11
|
||||
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.12
|
||||
github.com/mitchellh/go-homedir v1.1.0
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
|
||||
github.com/sergi/go-diff v1.1.0 // indirect
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
|
||||
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0
|
||||
gopkg.in/yaml.v2 v2.2.7
|
||||
gopkg.in/yaml.v2 v2.3.0
|
||||
)
|
||||
|
56
go.sum
56
go.sum
@ -1,48 +1,64 @@
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
||||
github.com/alecthomas/chroma v0.7.1 h1:G1i02OhUbRi2nJxcNkwJaY/J1gHXj9tt72qN6ZouLFQ=
|
||||
github.com/alecthomas/chroma v0.7.1/go.mod h1:gHw09mkX1Qp80JlYbmN9L3+4R5o6DJJ3GRShh+AICNc=
|
||||
github.com/alecthomas/chroma v0.8.0 h1:HS+HE97sgcqjQGu5uVr8jIE55Mmh5UeQ7kckAhHg2pY=
|
||||
github.com/alecthomas/chroma v0.8.0/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI=
|
||||
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
|
||||
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
|
||||
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
|
||||
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
|
||||
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
|
||||
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
|
||||
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
|
||||
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
|
||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0 h1:POO/ycCATvegFmVuPpQzZFJ+pGZeX22Ufu6fibxDVjU=
|
||||
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg=
|
||||
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
|
||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
@@ -5,6 +5,7 @@ import (
    "io/ioutil"
    "os"
    "path/filepath"
    "strings"

    cp "github.com/cheat/cheat/internal/cheatpath"

@@ -19,6 +20,7 @@ type Config struct {
    Cheatpaths []cp.Cheatpath `yaml:"cheatpaths"`
    Style      string         `yaml:"style"`
    Formatter  string         `yaml:"formatter"`
    Pager      string         `yaml:"pager"`
}

// New returns a new Config struct
@@ -75,14 +77,16 @@ func New(opts map[string]interface{}, confPath string, resolve bool) (Config, er
    // `resolve` is a switch that allows us to turn off symlink resolution when
    // running the config tests.
    if resolve {
        expanded, err = filepath.EvalSymlinks(expanded)
        evaled, err := filepath.EvalSymlinks(expanded)
        if err != nil {
            return Config{}, fmt.Errorf(
                "failed to resolve symlink: %s, %v",
                "failed to resolve symlink: %s: %v",
                expanded,
                err,
            )
        }

        expanded = evaled
    }

    conf.Cheatpaths[i].Path = expanded
@@ -109,5 +113,10 @@ func New(opts map[string]interface{}, confPath string, resolve bool) (Config, er
        conf.Formatter = "terminal16m"
    }

    // if a pager was not provided, set a default
    if strings.TrimSpace(conf.Pager) == "" {
        conf.Pager = ""
    }

    return conf, nil
}
24 internal/config/init.go Normal file
@@ -0,0 +1,24 @@
package config

import (
    "fmt"
    "io/ioutil"
    "os"
    "path/filepath"
)

// Init initializes a config file
func Init(confpath string, configs string) error {

    // assert that the config directory exists
    if err := os.MkdirAll(filepath.Dir(confpath), 0755); err != nil {
        return fmt.Errorf("failed to create directory: %v", err)
    }

    // write the config file
    if err := ioutil.WriteFile(confpath, []byte(configs), 0644); err != nil {
        return fmt.Errorf("failed to create file: %v", err)
    }

    return nil
}
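A usage sketch, not part of the changeset: from elsewhere inside the cheat module, the new `config.Init` helper might be driven roughly as below. The destination path and YAML contents are assumptions chosen for the example.

package main

import (
    "log"

    "github.com/cheat/cheat/internal/config"
)

func main() {
    // hypothetical destination and contents; Init creates the parent
    // directory with 0755 and writes the file with 0644
    confpath := "/home/foo/.config/cheat/conf.yml"
    configs := "editor: vim\nstyle: monokai\n"

    if err := config.Init(confpath, configs); err != nil {
        log.Fatalf("failed to init config: %v", err)
    }
}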
@@ -3,58 +3,10 @@ package config
import (
    "fmt"
    "os"
    "path"

    "github.com/mitchellh/go-homedir"
)

// Path returns the config file path
func Path(sys string) (string, error) {

    var paths []string

    // if CHEAT_CONFIG_PATH is set, return it
    if os.Getenv("CHEAT_CONFIG_PATH") != "" {

        // expand ~
        expanded, err := homedir.Expand(os.Getenv("CHEAT_CONFIG_PATH"))
        if err != nil {
            return "", fmt.Errorf("failed to expand ~: %v", err)
        }

        return expanded, nil

    // OSX config paths
    } else if sys == "darwin" {

        paths = []string{
            path.Join(os.Getenv("XDG_CONFIG_HOME"), "/cheat/conf.yml"),
            path.Join(os.Getenv("HOME"), ".config/cheat/conf.yml"),
            path.Join(os.Getenv("HOME"), ".cheat/conf.yml"),
        }

    // Linux config paths
    } else if sys == "linux" {

        paths = []string{
            path.Join(os.Getenv("XDG_CONFIG_HOME"), "/cheat/conf.yml"),
            path.Join(os.Getenv("HOME"), ".config/cheat/conf.yml"),
            path.Join(os.Getenv("HOME"), ".cheat/conf.yml"),
            "/etc/cheat/conf.yml",
        }

    // Windows config paths
    } else if sys == "windows" {

        paths = []string{
            fmt.Sprintf("%s/cheat/conf.yml", os.Getenv("APPDATA")),
            fmt.Sprintf("%s/cheat/conf.yml", os.Getenv("PROGRAMDATA")),
        }

    // Unsupported platforms
    } else {
        return "", fmt.Errorf("unsupported os: %s", sys)
    }
func Path(paths []string) (string, error) {

    // check if the config file exists on any paths
    for _, p := range paths {
54 internal/config/paths.go Normal file
@@ -0,0 +1,54 @@
package config

import (
    "fmt"
    "path"

    "github.com/mitchellh/go-homedir"
)

// Paths returns config file paths that are appropriate for the operating
// system
func Paths(
    sys string,
    home string,
    envvars map[string]string,
) ([]string, error) {

    // if `CHEAT_CONFIG_PATH` is set, expand ~ and return it
    if confpath, ok := envvars["CHEAT_CONFIG_PATH"]; ok {

        // expand ~
        expanded, err := homedir.Expand(confpath)
        if err != nil {
            return []string{}, fmt.Errorf("failed to expand ~: %v", err)
        }

        return []string{expanded}, nil
    }

    switch sys {
    case "darwin", "linux", "freebsd":
        paths := []string{}

        // don't include the `XDG_CONFIG_HOME` path if that envvar is not set
        if xdgpath, ok := envvars["XDG_CONFIG_HOME"]; ok {
            paths = append(paths, path.Join(xdgpath, "/cheat/conf.yml"))
        }

        paths = append(paths, []string{
            path.Join(home, ".config/cheat/conf.yml"),
            path.Join(home, ".cheat/conf.yml"),
            "/etc/cheat/conf.yml",
        }...)

        return paths, nil
    case "windows":
        return []string{
            path.Join(envvars["APPDATA"], "/cheat/conf.yml"),
            path.Join(envvars["PROGRAMDATA"], "/cheat/conf.yml"),
        }, nil
    default:
        return []string{}, fmt.Errorf("unsupported os: %s", sys)
    }
}
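As an illustration (not from the changeset): because `Paths` now receives the OS name, home directory, and an envvar map instead of reading the environment itself, a caller inside the cheat module would forward those values explicitly, roughly as below. The set of forwarded variables is an assumption based on the keys the function consults.

package main

import (
    "fmt"
    "log"
    "os"
    "runtime"

    "github.com/cheat/cheat/internal/config"
)

func main() {
    // forward only the environment variables that Paths consults
    envvars := map[string]string{}
    for _, key := range []string{"CHEAT_CONFIG_PATH", "XDG_CONFIG_HOME", "APPDATA", "PROGRAMDATA"} {
        if val, ok := os.LookupEnv(key); ok {
            envvars[key] = val
        }
    }

    home, err := os.UserHomeDir()
    if err != nil {
        log.Fatal(err)
    }

    paths, err := config.Paths(runtime.GOOS, home, envvars)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(paths)
}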
175 internal/config/paths_test.go Normal file
@ -0,0 +1,175 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
// TestValidatePathsNix asserts that the proper config paths are returned on
|
||||
// *nix platforms
|
||||
func TestValidatePathsNix(t *testing.T) {
|
||||
|
||||
// mock the user's home directory
|
||||
home := "/home/foo"
|
||||
|
||||
// mock some envvars
|
||||
envvars := map[string]string{
|
||||
"XDG_CONFIG_HOME": "/home/bar",
|
||||
}
|
||||
|
||||
// specify the platforms to test
|
||||
oses := []string{
|
||||
"darwin",
|
||||
"freebsd",
|
||||
"linux",
|
||||
}
|
||||
|
||||
// test each *nix os
|
||||
for _, os := range oses {
|
||||
// get the paths for the platform
|
||||
paths, err := Paths(os, home, envvars)
|
||||
if err != nil {
|
||||
t.Errorf("paths returned an error: %v", err)
|
||||
}
|
||||
|
||||
// specify the expected output
|
||||
want := []string{
|
||||
"/home/bar/cheat/conf.yml",
|
||||
"/home/foo/.config/cheat/conf.yml",
|
||||
"/home/foo/.cheat/conf.yml",
|
||||
"/etc/cheat/conf.yml",
|
||||
}
|
||||
|
||||
// assert that output matches expectations
|
||||
if !reflect.DeepEqual(paths, want) {
|
||||
t.Errorf(
|
||||
"failed to return expected paths: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(paths),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestValidatePathsNixNoXDG asserts that the proper config paths are returned
|
||||
// on *nix platforms when `XDG_CONFIG_HOME is not set
|
||||
func TestValidatePathsNixNoXDG(t *testing.T) {
|
||||
|
||||
// mock the user's home directory
|
||||
home := "/home/foo"
|
||||
|
||||
// mock some envvars
|
||||
envvars := map[string]string{}
|
||||
|
||||
// specify the platforms to test
|
||||
oses := []string{
|
||||
"darwin",
|
||||
"freebsd",
|
||||
"linux",
|
||||
}
|
||||
|
||||
// test each *nix os
|
||||
for _, os := range oses {
|
||||
// get the paths for the platform
|
||||
paths, err := Paths(os, home, envvars)
|
||||
if err != nil {
|
||||
t.Errorf("paths returned an error: %v", err)
|
||||
}
|
||||
|
||||
// specify the expected output
|
||||
want := []string{
|
||||
"/home/foo/.config/cheat/conf.yml",
|
||||
"/home/foo/.cheat/conf.yml",
|
||||
"/etc/cheat/conf.yml",
|
||||
}
|
||||
|
||||
// assert that output matches expectations
|
||||
if !reflect.DeepEqual(paths, want) {
|
||||
t.Errorf(
|
||||
"failed to return expected paths: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(paths),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestValidatePathsWindows asserts that the proper config paths are returned
|
||||
// on Windows platforms
|
||||
func TestValidatePathsWindows(t *testing.T) {
|
||||
|
||||
// mock the user's home directory
|
||||
home := "not-used-on-windows"
|
||||
|
||||
// mock some envvars
|
||||
envvars := map[string]string{
|
||||
"APPDATA": "/apps",
|
||||
"PROGRAMDATA": "/programs",
|
||||
}
|
||||
|
||||
// get the paths for the platform
|
||||
paths, err := Paths("windows", home, envvars)
|
||||
if err != nil {
|
||||
t.Errorf("paths returned an error: %v", err)
|
||||
}
|
||||
|
||||
// specify the expected output
|
||||
want := []string{
|
||||
"/apps/cheat/conf.yml",
|
||||
"/programs/cheat/conf.yml",
|
||||
}
|
||||
|
||||
// assert that output matches expectations
|
||||
if !reflect.DeepEqual(paths, want) {
|
||||
t.Errorf(
|
||||
"failed to return expected paths: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(paths),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// TestValidatePathsUnsupported asserts that an error is returned on
|
||||
// unsupported platforms
|
||||
func TestValidatePathsUnsupported(t *testing.T) {
|
||||
_, err := Paths("unsupported", "", map[string]string{})
|
||||
if err == nil {
|
||||
t.Errorf("failed to return error on unsupported platform")
|
||||
}
|
||||
}
|
||||
|
||||
// TestValidatePathsCheatConfigPath asserts that the proper config path is
|
||||
// returned when `CHEAT_CONFIG_PATH` is explicitly specified.
|
||||
func TestValidatePathsCheatConfigPath(t *testing.T) {
|
||||
|
||||
// mock the user's home directory
|
||||
home := "/home/foo"
|
||||
|
||||
// mock some envvars
|
||||
envvars := map[string]string{
|
||||
"XDG_CONFIG_HOME": "/home/bar",
|
||||
"CHEAT_CONFIG_PATH": "/home/baz/conf.yml",
|
||||
}
|
||||
|
||||
// get the paths for the platform
|
||||
paths, err := Paths("linux", home, envvars)
|
||||
if err != nil {
|
||||
t.Errorf("paths returned an error: %v", err)
|
||||
}
|
||||
|
||||
// specify the expected output
|
||||
want := []string{
|
||||
"/home/baz/conf.yml",
|
||||
}
|
||||
|
||||
// assert that output matches expectations
|
||||
if !reflect.DeepEqual(paths, want) {
|
||||
t.Errorf(
|
||||
"failed to return expected paths: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(paths),
|
||||
)
|
||||
}
|
||||
}
|
37 internal/display/display.go Normal file
@@ -0,0 +1,37 @@
package display

import (
    "fmt"
    "os"
    "os/exec"
    "strings"

    "github.com/cheat/cheat/internal/config"
)

// Display writes output either directly to stdout, or through a pager,
// depending upon configuration.
func Display(out string, conf config.Config) {
    // if no pager was configured, print the output to stdout and exit
    if conf.Pager == "" {
        fmt.Print(out)
        os.Exit(0)
    }

    // otherwise, pipe output through the pager
    parts := strings.Split(conf.Pager, " ")
    pager := parts[0]
    args := parts[1:]

    // run the pager
    cmd := exec.Command(pager, args...)
    cmd.Stdin = strings.NewReader(out)
    cmd.Stdout = os.Stdout

    // handle errors
    err := cmd.Run()
    if err != nil {
        fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to write to pager: %v", err))
        os.Exit(1)
    }
}
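Usage sketch, not part of the diff: `Display` either prints to stdout (empty `Pager`) or pipes the output through the configured pager command. The pager string below is an example value, not a project default.

package main

import (
    "github.com/cheat/cheat/internal/config"
    "github.com/cheat/cheat/internal/display"
)

func main() {
    // with Pager set, the output is piped through `less -FRX`;
    // with an empty Pager it would be printed directly to stdout
    conf := config.Config{Pager: "less -FRX"}
    display.Display("To foo the bar: baz\n", conf)
}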
@@ -1,6 +1,7 @@
package frontmatter

import (
    "fmt"
    "strings"

    "gopkg.in/yaml.v1"
@@ -28,7 +29,16 @@ func Parse(markdown string) (string, Frontmatter, error) {

    // otherwise, split the frontmatter and cheatsheet text
    parts := strings.SplitN(markdown, delim, 3)
    err := yaml.Unmarshal([]byte(parts[1]), &fm)

    return strings.TrimSpace(parts[2]), fm, err
    // return an error if the frontmatter parses into the wrong number of parts
    if len(parts) != 3 {
        return markdown, fm, fmt.Errorf("failed to delimit frontmatter")
    }

    // return an error if the YAML cannot be unmarshalled
    if err := yaml.Unmarshal([]byte(parts[1]), &fm); err != nil {
        return markdown, fm, fmt.Errorf("failed to unmarshal frontmatter: %v", err)
    }

    return strings.TrimSpace(parts[2]), fm, nil
}
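To illustrate the stricter parsing above (this sketch is not part of the changeset): a well-formed sheet splits into frontmatter and body, while malformed frontmatter now returns an error together with the raw markdown. The `Syntax` field access assumes the `Frontmatter` struct exposes it, as the `syntax:` key in the tests suggests.

package main

import (
    "fmt"
    "log"

    "github.com/cheat/cheat/internal/frontmatter"
)

func main() {
    // a well-formed sheet: frontmatter delimited by "---", body afterwards
    markdown := `---
syntax: go
tags: [ test ]
---
To foo the bar: baz`

    text, fm, err := frontmatter.Parse(markdown)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(fm.Syntax, fm.Tags, text)
}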
@@ -69,3 +69,27 @@ func TestHasNoFrontmatter(t *testing.T) {
        t.Errorf("failed to parse tags: want: len 0, got: len %d", len(fm.Tags))
    }
}

// TestHasInvalidFrontmatter asserts that markdown is properly parsed when it
// contains invalid frontmatter
func TestHasInvalidFrontmatter(t *testing.T) {

    // stub our cheatsheet content (with invalid frontmatter)
    markdown := `---
syntax: go
tags: [ test ]
To foo the bar: baz`

    // parse the frontmatter
    text, _, err := Parse(markdown)

    // assert that an error was returned
    if err == nil {
        t.Error("failed to error on invalid frontmatter")
    }

    // assert that the "raw" markdown was returned
    if text != markdown {
        t.Errorf("failed to parse text: want: %s, got: %s", markdown, text)
    }
}
24 internal/installer/clone.go Normal file
@@ -0,0 +1,24 @@
package installer

import (
    "fmt"
    "os"
    "os/exec"
)

const cloneURL = "https://github.com/cheat/cheatsheets.git"

// Clone clones the community cheatsheets
func Clone(path string) error {

    // perform the clone in a shell
    cmd := exec.Command("git", "clone", cloneURL, path)
    cmd.Stdout = os.Stdout
    cmd.Stderr = os.Stderr
    err := cmd.Run()
    if err != nil {
        return fmt.Errorf("failed to clone cheatsheets: %v", err)
    }

    return nil
}
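A brief usage sketch (not part of the changeset): `Clone` simply shells out to `git clone` against the community cheatsheets repository; the target directory below is a hypothetical cheatpath.

package main

import (
    "log"

    "github.com/cheat/cheat/internal/installer"
)

func main() {
    // clone the community cheatsheets into an example directory
    if err := installer.Clone("/home/foo/.config/cheat/cheatsheets/community"); err != nil {
        log.Fatal(err)
    }
}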
37 internal/installer/prompt.go Normal file
@@ -0,0 +1,37 @@
package installer

import (
    "bufio"
    "fmt"
    "os"
    "strings"
)

// Prompt prompts the user for a answer
func Prompt(prompt string, def bool) (bool, error) {

    // initialize a line reader
    reader := bufio.NewReader(os.Stdin)

    // display the prompt
    fmt.Print(fmt.Sprintf("%s: ", prompt))

    // read the answer
    ans, err := reader.ReadString('\n')
    if err != nil {
        return false, fmt.Errorf("failed to parse input: %v", err)
    }

    // normalize the answer
    ans = strings.ToLower(strings.TrimRight(ans, "\n"))

    // return the appropriate response
    switch ans {
    case "y":
        return true, nil
    case "":
        return def, nil
    default:
        return false, nil
    }
}
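For illustration only (not from the changeset): `Prompt` returns true for a "y" answer, the supplied default for an empty answer, and false otherwise. The prompt text below is an example.

package main

import (
    "fmt"
    "log"

    "github.com/cheat/cheat/internal/installer"
)

func main() {
    // pressing Enter accepts the default (true here)
    yes, err := installer.Prompt("Would you like to download the community cheatsheets? [Y/n]", true)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("download:", yes)
}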
37 internal/sheet/colorize.go Normal file
@@ -0,0 +1,37 @@
package sheet

import (
    "bytes"

    "github.com/cheat/cheat/internal/config"

    "github.com/alecthomas/chroma/quick"
)

// Colorize applies syntax-highlighting to a cheatsheet's Text.
func (s *Sheet) Colorize(conf config.Config) {

    // if the syntax was not specified, default to bash
    lex := s.Syntax
    if lex == "" {
        lex = "bash"
    }

    // write colorized text into a buffer
    var buf bytes.Buffer
    err := quick.Highlight(
        &buf,
        s.Text,
        lex,
        conf.Formatter,
        conf.Style,
    )

    // if colorization somehow failed, do nothing
    if err != nil {
        return
    }

    // otherwise, swap the cheatsheet's Text with its colorized equivalent
    s.Text = buf.String()
}
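Usage sketch, not part of the diff: `Colorize` rewrites a sheet's `Text` in place, falling back to the `bash` lexer when no syntax is set. The formatter and style values below are examples; the exported `Text` and `Syntax` fields are taken from how the surrounding code uses `Sheet`.

package main

import (
    "fmt"

    "github.com/cheat/cheat/internal/config"
    "github.com/cheat/cheat/internal/sheet"
)

func main() {
    // no Syntax set, so Colorize falls back to the "bash" lexer
    s := sheet.Sheet{Text: "grep -r 'foo' .\n"}
    conf := config.Config{Formatter: "terminal16m", Style: "monokai"}

    s.Colorize(conf)
    fmt.Print(s.Text)
}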
@@ -1,7 +0,0 @@
package sheet

// Match encapsulates search matches within cheatsheets
type Match struct {
    Line int
    Text string
}
@@ -3,43 +3,22 @@ package sheet
import (
    "regexp"
    "strings"

    "github.com/mgutz/ansi"
)

// Search searches for regexp matches in a cheatsheet's text, and optionally
// colorizes matching strings.
func (s *Sheet) Search(reg *regexp.Regexp, colorize bool) []Match {
// Search returns lines within a sheet's Text that match the search regex
func (s *Sheet) Search(reg *regexp.Regexp) string {

    // record matches
    matches := []Match{}
    matches := ""

    // search through the cheatsheet's text line by line
    // TODO: searching line-by-line is surely the "naive" approach. Revisit this
    // later with an eye for performance improvements.
    for linenum, line := range strings.Split(s.Text, "\n") {
    for _, line := range strings.Split(s.Text, "\n\n") {

        // exit early if the line doesn't match the regex
        if !reg.MatchString(line) {
            continue
        if reg.MatchString(line) {
            matches += line + "\n\n"
        }

        // init the match
        m := Match{
            Line: linenum + 1,
            Text: strings.TrimSpace(line),
        }

        // colorize the matching text if so configured
        if colorize {
            m.Text = reg.ReplaceAllStringFunc(m.Text, func(matched string) string {
                return ansi.Color(matched, "red+b")
            })
        }

        // record the match
        matches = append(matches, m)
    }

    return matches
    return strings.TrimSpace(matches)
}
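A small sketch of the new behavior (not part of the changeset): `Search` now splits the sheet on blank lines and returns the matching blocks as a single string instead of a `[]Match`, which is what the rewritten tests that follow exercise.

package main

import (
    "fmt"
    "regexp"

    "github.com/cheat/cheat/internal/sheet"
)

func main() {
    s := sheet.Sheet{Text: "The quick brown fox\njumped over\n\nthe lazy dog."}
    reg := regexp.MustCompile("(?i)fox")

    // prints the block containing the match:
    // "The quick brown fox\njumped over"
    fmt.Println(s.Search(reg))
}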
@ -4,8 +4,6 @@ import (
|
||||
"reflect"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
// TestSearchNoMatch ensures that the expected output is returned when no
|
||||
@ -24,21 +22,21 @@ func TestSearchNoMatch(t *testing.T) {
|
||||
}
|
||||
|
||||
// search the sheet
|
||||
matches := sheet.Search(reg, false)
|
||||
matches := sheet.Search(reg)
|
||||
|
||||
// assert that no matches were found
|
||||
if len(matches) != 0 {
|
||||
t.Errorf("failure: expected no matches: got: %s", spew.Sdump(matches))
|
||||
if matches != "" {
|
||||
t.Errorf("failure: expected no matches: got: %s", matches)
|
||||
}
|
||||
}
|
||||
|
||||
// TestSearchSingleMatchNoColor asserts that the expected output is returned
|
||||
// when a single match is returned, and no colorization is applied.
|
||||
func TestSearchSingleMatchNoColor(t *testing.T) {
|
||||
// TestSearchSingleMatch asserts that the expected output is returned
|
||||
// when a single match is returned
|
||||
func TestSearchSingleMatch(t *testing.T) {
|
||||
|
||||
// mock a cheatsheet
|
||||
sheet := Sheet{
|
||||
Text: "The quick brown fox\njumped over\nthe lazy dog.",
|
||||
Text: "The quick brown fox\njumped over\n\nthe lazy dog.",
|
||||
}
|
||||
|
||||
// compile the search regex
|
||||
@ -48,69 +46,28 @@ func TestSearchSingleMatchNoColor(t *testing.T) {
|
||||
}
|
||||
|
||||
// search the sheet
|
||||
matches := sheet.Search(reg, false)
|
||||
matches := sheet.Search(reg)
|
||||
|
||||
// specify the expected results
|
||||
want := []Match{
|
||||
Match{
|
||||
Line: 1,
|
||||
Text: "The quick brown fox",
|
||||
},
|
||||
}
|
||||
want := "The quick brown fox\njumped over"
|
||||
|
||||
// assert that the correct matches were returned
|
||||
if !reflect.DeepEqual(matches, want) {
|
||||
if matches != want {
|
||||
t.Errorf(
|
||||
"failed to return expected matches: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(matches),
|
||||
want,
|
||||
matches,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// TestSearchSingleMatchColorized asserts that the expected output is returned
|
||||
// when a single match is returned, and colorization is applied
|
||||
func TestSearchSingleMatchColorized(t *testing.T) {
|
||||
// TestSearchMultiMatch asserts that the expected output is returned
|
||||
// when a multiple matches are returned
|
||||
func TestSearchMultiMatch(t *testing.T) {
|
||||
|
||||
// mock a cheatsheet
|
||||
sheet := Sheet{
|
||||
Text: "The quick brown fox\njumped over\nthe lazy dog.",
|
||||
}
|
||||
|
||||
// compile the search regex
|
||||
reg, err := regexp.Compile("(?i)fox")
|
||||
if err != nil {
|
||||
t.Errorf("failed to compile regex: %v", err)
|
||||
}
|
||||
|
||||
// search the sheet
|
||||
matches := sheet.Search(reg, true)
|
||||
|
||||
// specify the expected results
|
||||
want := []Match{
|
||||
Match{
|
||||
Line: 1,
|
||||
Text: "The quick brown \x1b[1;31mfox\x1b[0m",
|
||||
},
|
||||
}
|
||||
|
||||
// assert that the correct matches were returned
|
||||
if !reflect.DeepEqual(matches, want) {
|
||||
t.Errorf(
|
||||
"failed to return expected matches: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(matches),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// TestSearchMultiMatchNoColor asserts that the expected output is returned
|
||||
// when a multiple matches are returned, and no colorization is applied
|
||||
func TestSearchMultiMatchNoColor(t *testing.T) {
|
||||
|
||||
// mock a cheatsheet
|
||||
sheet := Sheet{
|
||||
Text: "The quick brown fox\njumped over\nthe lazy dog.",
|
||||
Text: "The quick brown fox\n\njumped over\n\nthe lazy dog.",
|
||||
}
|
||||
|
||||
// compile the search regex
|
||||
@ -120,66 +77,17 @@ func TestSearchMultiMatchNoColor(t *testing.T) {
|
||||
}
|
||||
|
||||
// search the sheet
|
||||
matches := sheet.Search(reg, false)
|
||||
matches := sheet.Search(reg)
|
||||
|
||||
// specify the expected results
|
||||
want := []Match{
|
||||
Match{
|
||||
Line: 1,
|
||||
Text: "The quick brown fox",
|
||||
},
|
||||
Match{
|
||||
Line: 3,
|
||||
Text: "the lazy dog.",
|
||||
},
|
||||
}
|
||||
want := "The quick brown fox\n\nthe lazy dog."
|
||||
|
||||
// assert that the correct matches were returned
|
||||
if !reflect.DeepEqual(matches, want) {
|
||||
t.Errorf(
|
||||
"failed to return expected matches: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(matches),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// TestSearchMultiMatchColorized asserts that the expected output is returned
|
||||
// when a multiple matches are returned, and colorization is applied
|
||||
func TestSearchMultiMatchColorized(t *testing.T) {
|
||||
|
||||
// mock a cheatsheet
|
||||
sheet := Sheet{
|
||||
Text: "The quick brown fox\njumped over\nthe lazy dog.",
|
||||
}
|
||||
|
||||
// compile the search regex
|
||||
reg, err := regexp.Compile("(?i)the")
|
||||
if err != nil {
|
||||
t.Errorf("failed to compile regex: %v", err)
|
||||
}
|
||||
|
||||
// search the sheet
|
||||
matches := sheet.Search(reg, true)
|
||||
|
||||
// specify the expected results
|
||||
want := []Match{
|
||||
Match{
|
||||
Line: 1,
|
||||
Text: "\x1b[1;31mThe\x1b[0m quick brown fox",
|
||||
},
|
||||
Match{
|
||||
Line: 3,
|
||||
Text: "\x1b[1;31mthe\x1b[0m lazy dog.",
|
||||
},
|
||||
}
|
||||
|
||||
// assert that the correct matches were returned
|
||||
if !reflect.DeepEqual(matches, want) {
|
||||
t.Errorf(
|
||||
"failed to return expected matches: want:\n%s, got:\n%s",
|
||||
spew.Sdump(want),
|
||||
spew.Sdump(matches),
|
||||
want,
|
||||
matches,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@@ -33,7 +33,7 @@ func Load(cheatpaths []cp.Cheatpath) ([]map[string]sheet.Sheet, error) {

    // fail if an error occurred while walking the directory
    if err != nil {
        return fmt.Errorf("error walking path: %v", err)
        return fmt.Errorf("failed to walk path: %v", err)
    }

    // don't register directories as cheatsheets
@@ -61,7 +61,12 @@ func Load(cheatpaths []cp.Cheatpath) ([]map[string]sheet.Sheet, error) {
    // parse the cheatsheet file into a `sheet` struct
    s, err := sheet.New(title, path, cheatpath.Tags, cheatpath.ReadOnly)
    if err != nil {
        return fmt.Errorf("could not create sheet: %v", err)
        return fmt.Errorf(
            "failed to load sheet: %s, path: %s, err: %v",
            title,
            path,
            err,
        )
    }

    // register the cheatsheet on its cheatpath, keyed by its title
@@ -9,7 +9,7 @@ import (
    "github.com/cheat/cheat/internal/sheet"
)

// TestTags asserts that cheetsheet tags are properly returned
// TestTags asserts that cheatsheet tags are properly returned
func TestTags(t *testing.T) {

    // mock cheatsheets available on multiple cheatpaths
@@ -1,9 +0,0 @@
function _cheat_autocomplete {
    sheets=$(cheat -l | sed -n '2,$p'|cut -d' ' -f1)
    COMPREPLY=()
    if [ $COMP_CWORD = 1 ]; then
        COMPREPLY=(`compgen -W "$sheets" -- $2`)
    fi
}

complete -F _cheat_autocomplete cheat
74 scripts/cheat.bash Executable file
@@ -0,0 +1,74 @@
# cheat(1) completion -*- shell-script -*-

# generate cheatsheet completions, optionally using `fzf`
_cheat_complete_cheatsheets()
{
    if [[ "$CHEAT_USE_FZF" = true ]]; then
        FZF_COMPLETION_TRIGGER='' _fzf_complete "--no-multi" "$@" < <(
            cheat -l | tail -n +2 | cut -d' ' -f1
        )
    else
        COMPREPLY=( $(compgen -W "$(cheat -l | tail -n +2 | cut -d' ' -f1)" -- "$cur") )
    fi
}

# generate tag completions, optionally using `fzf`
_cheat_complete_tags()
{
    if [ "$CHEAT_USE_FZF" = true ]; then
        FZF_COMPLETION_TRIGGER='' _fzf_complete "--no-multi" "$@" < <(cheat -T)
    else
        COMPREPLY=( $(compgen -W "$(cheat -T)" -- "$cur") )
    fi
}

# implement the `cheat` autocompletions
_cheat()
{
    local cur prev words cword split
    _init_completion -s || return

    # complete options that are currently being typed: `--col` => `--colorize`
    if [[ $cur == -* ]]; then
        COMPREPLY=( $(compgen -W '$(_parse_help "$1" | sed "s/=//g")' -- "$cur") )
        [[ $COMPREPLY == *= ]] && compopt -o nospace
        return
    fi

    # implement completions
    case $prev in
        --colorize|-c|\
        --directories|-d|\
        --init|\
        --regex|-r|\
        --search|-s|\
        --tags|-T|\
        --version|-v)
            # noop the above, which should implement no completions
            ;;
        --edit|-e)
            _cheat_complete_cheatsheets
            ;;
        --list|-l)
            _cheat_complete_cheatsheets
            ;;
        --path|-p)
            COMPREPLY=( $(compgen -W "$(cheat -d | cut -d':' -f1)" -- "$cur") )
            ;;
        --rm)
            _cheat_complete_cheatsheets
            ;;
        --tag|-t)
            _cheat_complete_tags
            ;;
        *)
            _cheat_complete_cheatsheets
            ;;
    esac

    $split && return

} &&
    complete -F _cheat cheat

# ex: filetype=sh
66 scripts/cheat.zsh Executable file
@@ -0,0 +1,66 @@
#compdef cheat

local cheats taglist pathlist

_cheat_complete_personal_cheatsheets()
{
    cheats=("${(f)$(cheat -l -t personal | tail -n +2 | cut -d' ' -f1)}")
    _describe -t cheats 'cheats' cheats
}

_cheat_complete_full_cheatsheets()
{
    cheats=("${(f)$(cheat -l | tail -n +2 | cut -d' ' -f1)}")
    _describe -t cheats 'cheats' cheats
}

_cheat_complete_tags()
{
    taglist=("${(f)$(cheat -T)}")
    _describe -t taglist 'taglist' taglist
}

_cheat_complete_paths()
{
    pathlist=("${(f)$(cheat -d | cut -d':' -f1)}")
    _describe -t pathlist 'pathlist' pathlist
}

_cheat() {

    _arguments -C \
        '(--init)--init[Write a default config file to stdout]: :->none' \
        '(-c --colorize)'{-c,--colorize}'[Colorize output]: :->none' \
        '(-d --directories)'{-d,--directories}'[List cheatsheet directories]: :->none' \
        '(-e --edit)'{-e,--edit}'[Edit <sheet>]: :->personal' \
        '(-l --list)'{-l,--list}'[List cheatsheets]: :->full' \
        '(-p --path)'{-p,--path}'[Return only sheets found on path <name>]: :->pathlist' \
        '(-r --regex)'{-r,--regex}'[Treat search <phrase> as a regex]: :->none' \
        '(-s --search)'{-s,--search}'[Search cheatsheets for <phrase>]: :->none' \
        '(-t --tag)'{-t,--tag}'[Return only sheets matching <tag>]: :->taglist' \
        '(-T --tags)'{-T,--tags}'[List all tags in use]: :->none' \
        '(-v --version)'{-v,--version}'[Print the version number]: :->none' \
        '(--rm)--rm[Remove (delete) <sheet>]: :->personal' \
        '(-)*: :->full'

    case $state in
        (none)
            ;;
        (full)
            _cheat_complete_full_cheatsheets
            ;;
        (personal)
            _cheat_complete_personal_cheatsheets
            ;;
        (taglist)
            _cheat_complete_tags
            ;;
        (pathlist)
            _cheat_complete_paths
            ;;
        (*)
            ;;
    esac
}

_cheat
@@ -1,11 +0,0 @@
#!/bin/bash

# This function enables you to choose a cheatsheet to view by selecting output
# from `cheat -l`. `source` it in your shell to enable it. (Consider renaming
# or aliasing it to something convenient.)

# Arguments passed to this function (like --color) will be passed to the second
# invokation of `cheat`.
function cheat-fzf {
    eval `cheat -l | tail -n +2 | fzf | awk -v vars="$*" '{ print "cheat " $1 " -t " $3, vars }'`
}
5 vendor/github.com/alecthomas/chroma/.golangci.yml generated vendored
@ -20,6 +20,11 @@ linters:
|
||||
- wsl
|
||||
- gomnd
|
||||
- gocognit
|
||||
- goerr113
|
||||
- nolintlint
|
||||
- testpackage
|
||||
- godot
|
||||
- nestif
|
||||
|
||||
linters-settings:
|
||||
govet:
|
||||
|
30 vendor/github.com/alecthomas/chroma/.goreleaser.yml generated vendored
@ -3,28 +3,30 @@ release:
|
||||
github:
|
||||
owner: alecthomas
|
||||
name: chroma
|
||||
brew:
|
||||
install: bin.install "chroma"
|
||||
brews:
|
||||
-
|
||||
install: bin.install "chroma"
|
||||
builds:
|
||||
- goos:
|
||||
- linux
|
||||
- darwin
|
||||
- windows
|
||||
- linux
|
||||
- darwin
|
||||
- windows
|
||||
goarch:
|
||||
- amd64
|
||||
- "386"
|
||||
- amd64
|
||||
- "386"
|
||||
goarm:
|
||||
- "6"
|
||||
- "6"
|
||||
main: ./cmd/chroma/main.go
|
||||
ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
|
||||
binary: chroma
|
||||
archive:
|
||||
format: tar.gz
|
||||
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
|
||||
archives:
|
||||
-
|
||||
format: tar.gz
|
||||
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
|
||||
.Arm }}{{ end }}'
|
||||
files:
|
||||
- COPYING
|
||||
- README*
|
||||
files:
|
||||
- COPYING
|
||||
- README*
|
||||
snapshot:
|
||||
name_template: SNAPSHOT-{{ .Commit }}
|
||||
checksum:
|
||||
|
2 vendor/github.com/alecthomas/chroma/.travis.yml generated vendored
@ -4,7 +4,7 @@ go:
|
||||
- "1.13.x"
|
||||
script:
|
||||
- go test -v ./...
|
||||
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2
|
||||
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
|
||||
- ./bin/golangci-lint run
|
||||
- git clean -fdx .
|
||||
after_success:
|
||||
|
14 vendor/github.com/alecthomas/chroma/formatters/html/html.go generated vendored
@ -22,6 +22,9 @@ func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix =
|
||||
// WithClasses emits HTML using CSS classes, rather than inline styles.
|
||||
func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }
|
||||
|
||||
// WithAllClasses disables an optimisation that omits redundant CSS classes.
|
||||
func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }
|
||||
|
||||
// TabWidth sets the number of characters for a tab. Defaults to 8.
|
||||
func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
|
||||
|
||||
@ -141,6 +144,7 @@ type Formatter struct {
|
||||
standalone bool
|
||||
prefix string
|
||||
Classes bool // Exported field to detect when classes are being used
|
||||
allClasses bool
|
||||
preWrapper PreWrapper
|
||||
tabWidth int
|
||||
lineNumbers bool
|
||||
@ -188,7 +192,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
|
||||
wrapInTable := f.lineNumbers && f.lineNumbersInTable
|
||||
|
||||
lines := chroma.SplitTokensIntoLines(tokens)
|
||||
lineDigits := len(fmt.Sprintf("%d", len(lines)))
|
||||
lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
|
||||
highlightIndex := 0
|
||||
|
||||
if wrapInTable {
|
||||
@ -362,8 +366,12 @@ func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
|
||||
if tt == chroma.Background {
|
||||
continue
|
||||
}
|
||||
class := f.class(tt)
|
||||
if class == "" {
|
||||
continue
|
||||
}
|
||||
styles := css[tt]
|
||||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, f.class(tt), styles); err != nil {
|
||||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@ -379,7 +387,7 @@ func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string
|
||||
if t != chroma.Background {
|
||||
entry = entry.Sub(bg)
|
||||
}
|
||||
if entry.IsZero() {
|
||||
if !f.allClasses && entry.IsZero() {
|
||||
continue
|
||||
}
|
||||
classes[t] = StyleEntryToCSS(entry)
|
||||
|
17 vendor/github.com/alecthomas/chroma/go.mod generated vendored
@ -1,19 +1,18 @@
|
||||
module github.com/alecthomas/chroma
|
||||
|
||||
go 1.13
|
||||
|
||||
require (
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae
|
||||
github.com/alecthomas/kong v0.2.4
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
|
||||
github.com/dlclark/regexp2 v1.1.6
|
||||
github.com/mattn/go-colorable v0.0.9
|
||||
github.com/mattn/go-isatty v0.0.4
|
||||
github.com/dlclark/regexp2 v1.2.0
|
||||
github.com/mattn/go-colorable v0.1.6
|
||||
github.com/mattn/go-isatty v0.0.12
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/sergi/go-diff v1.0.0 // indirect
|
||||
github.com/stretchr/testify v1.3.0 // indirect
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 // indirect
|
||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
|
||||
)
|
||||
|
||||
replace github.com/GeertJohan/go.rice => github.com/alecthomas/go.rice v1.0.1-0.20190719113735-961b99d742e7
|
||||
|
||||
go 1.13
|
||||
|
25 vendor/github.com/alecthomas/chroma/go.sum generated vendored
@ -2,8 +2,8 @@ github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae h1:C4Q9m+oXOxcSWwYk9XzzafY2xAVAaeubZbUHJkw3PlY=
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI=
|
||||
github.com/alecthomas/kong v0.2.4 h1:Y0ZBCHAvHhTHw7FFJ2FzCAAG4pkbTgA45nc7BpMhDNk=
|
||||
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
|
||||
@ -11,15 +11,16 @@ github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
|
||||
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
|
||||
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
|
||||
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
|
||||
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
@ -29,5 +30,7 @@ github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
|
||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
|
7 vendor/github.com/alecthomas/chroma/lexer.go generated vendored
@ -6,7 +6,8 @@ import (
|
||||
|
||||
var (
|
||||
defaultOptions = &TokeniseOptions{
|
||||
State: "root",
|
||||
State: "root",
|
||||
EnsureLF: true,
|
||||
}
|
||||
)
|
||||
|
||||
@ -80,6 +81,10 @@ type TokeniseOptions struct {
|
||||
State string
|
||||
// Nested tokenisation.
|
||||
Nested bool
|
||||
|
||||
// If true, all EOLs are converted into LF
|
||||
// by replacing CRLF and CR
|
||||
EnsureLF bool
|
||||
}
|
||||
|
||||
// A Lexer for tokenising source code.
|
||||
|
2 vendor/github.com/alecthomas/chroma/lexers/b/bash.go generated vendored
@ -36,7 +36,7 @@ var Bash = internal.Register(MustNewLexer(
|
||||
{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
|
||||
{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
|
||||
{`\A#!.+\n`, CommentPreproc, nil},
|
||||
{`#.*\S`, CommentSingle, nil},
|
||||
{`#.*(\S|$)`, CommentSingle, nil},
|
||||
{`\\[\w\W]`, LiteralStringEscape, nil},
|
||||
{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
|
||||
{`[\[\]{}()=]`, Operator, nil},
|
||||
|
206 vendor/github.com/alecthomas/chroma/lexers/c/caddyfile.go generated vendored Normal file
@ -0,0 +1,206 @@
|
||||
package c
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// caddyfileCommon are the rules common to both of the lexer variants
|
||||
var caddyfileCommon = Rules{
|
||||
"site_block_common": {
|
||||
// Import keyword
|
||||
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
|
||||
// Matcher definition
|
||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
||||
// Matcher token stub for docs
|
||||
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
|
||||
// These cannot have matchers but may have things that look like
|
||||
// matchers in their arguments, so we just parse as a subdirective.
|
||||
{`try_files`, Keyword, Push("subdirective")},
|
||||
// These are special, they can nest more directives
|
||||
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
|
||||
// Any other directive
|
||||
{`[^\s#]+`, Keyword, Push("directive")},
|
||||
Include("base"),
|
||||
},
|
||||
"matcher": {
|
||||
{`\{`, Punctuation, Push("block")},
|
||||
// Not can be one-liner
|
||||
{`not`, Keyword, Push("deep_not_matcher")},
|
||||
// Any other same-line matcher
|
||||
{`[^\s#]+`, Keyword, Push("arguments")},
|
||||
// Terminators
|
||||
{`\n`, Text, Pop(1)},
|
||||
{`\}`, Punctuation, Pop(1)},
|
||||
Include("base"),
|
||||
},
|
||||
"block": {
|
||||
{`\}`, Punctuation, Pop(2)},
|
||||
// Not can be one-liner
|
||||
{`not`, Keyword, Push("not_matcher")},
|
||||
// Any other subdirective
|
||||
{`[^\s#]+`, Keyword, Push("subdirective")},
|
||||
Include("base"),
|
||||
},
|
||||
"nested_block": {
|
||||
{`\}`, Punctuation, Pop(2)},
|
||||
// Matcher definition
|
||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
||||
// Something that starts with literally < is probably a docs stub
|
||||
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
|
||||
// Any other directive
|
||||
{`[^\s#]+`, Keyword, Push("nested_directive")},
|
||||
Include("base"),
|
||||
},
|
||||
"not_matcher": {
|
||||
{`\}`, Punctuation, Pop(2)},
|
||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||
{`[^\s#]+`, Keyword, Push("arguments")},
|
||||
{`\s+`, Text, nil},
|
||||
},
|
||||
"deep_not_matcher": {
|
||||
{`\}`, Punctuation, Pop(2)},
|
||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
|
||||
{`\s+`, Text, nil},
|
||||
},
|
||||
"directive": {
|
||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||
Include("matcher_token"),
|
||||
Include("comments_pop_1"),
|
||||
{`\n`, Text, Pop(1)},
|
||||
Include("base"),
|
||||
},
|
||||
"nested_directive": {
|
||||
{`\{(?=\s)`, Punctuation, Push("nested_block")},
|
||||
Include("matcher_token"),
|
||||
Include("comments_pop_1"),
|
||||
{`\n`, Text, Pop(1)},
|
||||
Include("base"),
|
||||
},
|
||||
"subdirective": {
|
||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||
Include("comments_pop_1"),
|
||||
{`\n`, Text, Pop(1)},
|
||||
Include("base"),
|
||||
},
|
||||
"arguments": {
|
||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||
Include("comments_pop_2"),
|
||||
{`\\\n`, Text, nil}, // Skip escaped newlines
|
||||
{`\n`, Text, Pop(2)},
|
||||
Include("base"),
|
||||
},
|
||||
"deep_subdirective": {
|
||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
||||
Include("comments_pop_3"),
|
||||
{`\n`, Text, Pop(3)},
|
||||
Include("base"),
|
||||
},
|
||||
"matcher_token": {
|
||||
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
|
||||
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
|
||||
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
|
||||
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
|
||||
},
|
||||
"comments": {
|
||||
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
|
||||
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
|
||||
},
|
||||
"comments_pop_1": {
|
||||
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
|
||||
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
|
||||
},
|
||||
"comments_pop_2": {
|
||||
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
|
||||
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
|
||||
},
|
||||
"comments_pop_3": {
|
||||
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
|
||||
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
|
||||
},
|
||||
"base": {
|
||||
Include("comments"),
|
||||
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
|
||||
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
|
||||
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
|
||||
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
|
||||
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
|
||||
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
|
||||
{`\]|\|`, Punctuation, nil},
|
||||
{`[^\s#{}$\]]+`, LiteralString, nil},
|
||||
{`/[^\s#]*`, Name, nil},
|
||||
{`\s+`, Text, nil},
|
||||
},
|
||||
}
|
||||
|
||||
// Caddyfile lexer.
|
||||
var Caddyfile = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "Caddyfile",
|
||||
Aliases: []string{"caddyfile", "caddy"},
|
||||
Filenames: []string{"Caddyfile*"},
|
||||
MimeTypes: []string{},
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
Include("comments"),
|
||||
// Global options block
|
||||
{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
|
||||
// Snippets
|
||||
{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
|
||||
// Site label
|
||||
{`[^#{(\s,]+`, GenericHeading, Push("label")},
|
||||
// Site label with placeholder
|
||||
{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
|
||||
{`\s+`, Text, nil},
|
||||
},
|
||||
"globals": {
|
||||
{`\}`, Punctuation, Pop(1)},
|
||||
{`[^\s#]+`, Keyword, Push("directive")},
|
||||
Include("base"),
|
||||
},
|
||||
"snippet": {
|
||||
{`\}`, Punctuation, Pop(1)},
|
||||
// Matcher definition
|
||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
||||
// Any directive
|
||||
{`[^\s#]+`, Keyword, Push("directive")},
|
||||
Include("base"),
|
||||
},
|
||||
"label": {
|
||||
// Allow multiple labels, comma separated, newlines after
|
||||
// a comma means another label is coming
|
||||
{`,\s*\n?`, Text, nil},
|
||||
{` `, Text, nil},
|
||||
// Site label with placeholder
|
||||
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
|
||||
// Site label
|
||||
{`[^#{(\s,]+`, GenericHeading, nil},
|
||||
// Comment after non-block label (hack because comments end in \n)
|
||||
{`#.*\n`, CommentSingle, Push("site_block")},
|
||||
// Note: if \n, we'll never pop out of the site_block, it's valid
|
||||
{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
|
||||
},
|
||||
"site_block": {
|
||||
{`\}`, Punctuation, Pop(2)},
|
||||
Include("site_block_common"),
|
||||
},
|
||||
}.Merge(caddyfileCommon),
|
||||
))
|
||||
|
||||
// Caddyfile directive-only lexer.
|
||||
var CaddyfileDirectives = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "Caddyfile Directives",
|
||||
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
|
||||
Filenames: []string{},
|
||||
MimeTypes: []string{},
|
||||
},
|
||||
Rules{
|
||||
// Same as "site_block" in Caddyfile
|
||||
"root": {
|
||||
Include("site_block_common"),
|
||||
},
|
||||
}.Merge(caddyfileCommon),
|
||||
))
|
137 vendor/github.com/alecthomas/chroma/lexers/circular/php.go generated vendored
@ -1,15 +1,12 @@
|
||||
package circular
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/h"
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// PHP lexer.
|
||||
var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
|
||||
// PHP lexer for pure PHP code (not embedded in HTML).
|
||||
var PHP = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "PHP",
|
||||
Aliases: []string{"php", "php3", "php4", "php5"},
|
||||
@ -19,73 +16,65 @@ var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
|
||||
CaseInsensitive: true,
|
||||
EnsureNL: true,
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
{`<\?(php)?`, CommentPreproc, Push("php")},
|
||||
{`[^<]+`, Other, nil},
|
||||
{`<`, Other, nil},
|
||||
},
|
||||
"php": {
|
||||
{`\?>`, CommentPreproc, Pop(1)},
|
||||
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
|
||||
{`\s+`, Text, nil},
|
||||
{`#.*?\n`, CommentSingle, nil},
|
||||
{`//.*?\n`, CommentSingle, nil},
|
||||
{`/\*\*/`, CommentMultiline, nil},
|
||||
{`/\*\*.*?\*/`, LiteralStringDoc, nil},
|
||||
{`/\*.*?\*/`, CommentMultiline, nil},
|
||||
{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
|
||||
{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
|
||||
{`\?`, Operator, nil},
|
||||
{`[\[\]{}();,]+`, Punctuation, nil},
|
||||
{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
|
||||
{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
|
||||
{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
|
||||
{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
|
||||
{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
|
||||
{`(true|false|null)\b`, KeywordConstant, nil},
|
||||
Include("magicconstants"),
|
||||
{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
|
||||
{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
|
||||
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
|
||||
{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
|
||||
{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
|
||||
{`0[0-7]+`, LiteralNumberOct, nil},
|
||||
{`0x[a-f0-9]+`, LiteralNumberHex, nil},
|
||||
{`\d+`, LiteralNumberInteger, nil},
|
||||
{`0b[01]+`, LiteralNumberBin, nil},
|
||||
{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
|
||||
{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
|
||||
{`"`, LiteralStringDouble, Push("string")},
|
||||
},
|
||||
"magicfuncs": {
|
||||
{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
|
||||
},
|
||||
"magicconstants": {
|
||||
{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
|
||||
},
|
||||
"classname": {
|
||||
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
|
||||
},
|
||||
"functionname": {
|
||||
Include("magicfuncs"),
|
||||
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
|
||||
Default(Pop(1)),
|
||||
},
|
||||
"string": {
|
||||
{`"`, LiteralStringDouble, Pop(1)},
|
||||
{`[^{$"\\]+`, LiteralStringDouble, nil},
|
||||
{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
|
||||
{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
|
||||
{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
|
||||
{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
|
||||
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
|
||||
{`[${\\]`, LiteralStringDouble, nil},
|
||||
},
|
||||
phpCommonRules.Rename("php", "root"),
|
||||
))
|
||||
|
||||
var phpCommonRules = Rules{
|
||||
"php": {
|
||||
{`\?>`, CommentPreproc, Pop(1)},
|
||||
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
|
||||
{`\s+`, Text, nil},
|
||||
{`#.*?\n`, CommentSingle, nil},
|
||||
{`//.*?\n`, CommentSingle, nil},
|
||||
{`/\*\*/`, CommentMultiline, nil},
|
||||
{`/\*\*.*?\*/`, LiteralStringDoc, nil},
|
||||
{`/\*.*?\*/`, CommentMultiline, nil},
|
||||
{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
|
||||
{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
|
||||
{`\?`, Operator, nil},
|
||||
{`[\[\]{}();,]+`, Punctuation, nil},
|
||||
{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
|
||||
{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
|
||||
{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
|
||||
{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
|
||||
{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
|
||||
{`(true|false|null)\b`, KeywordConstant, nil},
|
||||
Include("magicconstants"),
|
||||
{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
|
||||
{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
|
||||
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
|
||||
{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
|
||||
{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
|
||||
{`0[0-7]+`, LiteralNumberOct, nil},
|
||||
{`0x[a-f0-9]+`, LiteralNumberHex, nil},
|
||||
{`\d+`, LiteralNumberInteger, nil},
|
||||
{`0b[01]+`, LiteralNumberBin, nil},
|
||||
{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
|
||||
{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
|
||||
{`"`, LiteralStringDouble, Push("string")},
|
||||
},
|
||||
).SetAnalyser(func(text string) float32 {
|
||||
if strings.Contains(text, "<?php") {
|
||||
return 0.5
|
||||
}
|
||||
return 0.0
|
||||
})))
|
||||
"magicfuncs": {
|
||||
{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
|
||||
},
|
||||
"magicconstants": {
|
||||
{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
|
||||
},
|
||||
"classname": {
|
||||
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
|
||||
},
|
||||
"functionname": {
|
||||
Include("magicfuncs"),
|
||||
{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
|
||||
Default(Pop(1)),
|
||||
},
|
||||
"string": {
|
||||
{`"`, LiteralStringDouble, Pop(1)},
|
||||
{`[^{$"\\]+`, LiteralStringDouble, nil},
|
||||
{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
|
||||
{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
|
||||
{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
|
||||
{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
|
||||
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
|
||||
{`[${\\]`, LiteralStringDouble, nil},
|
||||
},
|
||||
}
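The two helpers used around `phpCommonRules` — `Rules.Rename` in the PHP lexer above and `Rules.Merge` in the PHTML lexer below — are added to chroma's regexp.go later in this diff. A minimal sketch of that sharing pattern (illustrative only; it assumes the chroma version vendored here and uses shortened patterns):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	// A shared rule map, analogous to phpCommonRules (patterns shortened).
	common := chroma.Rules{
		"php": {
			{Pattern: `\?>`, Type: chroma.CommentPreproc},
		},
	}

	// Standalone PHP lexer: the shared "php" state becomes its "root".
	php := common.Rename("php", "root")

	// PHTML lexer: keeps its own "root" and overlays the shared states.
	phtml := chroma.Rules{
		"root": {
			{Pattern: `<\?(php)?`, Type: chroma.CommentPreproc},
		},
	}.Merge(common)

	fmt.Println(len(php["root"]), len(phtml["php"])) // 1 1
}
```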
34 vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go generated vendored Normal file
@@ -0,0 +1,34 @@
package circular

import (
	"strings"

	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/h"
	"github.com/alecthomas/chroma/lexers/internal"
)

// PHTML lexer is PHP in HTML.
var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
	&Config{
		Name:            "PHTML",
		Aliases:         []string{"phtml"},
		Filenames:       []string{"*.phtml"},
		MimeTypes:       []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"},
		DotAll:          true,
		CaseInsensitive: true,
		EnsureNL:        true,
	},
	Rules{
		"root": {
			{`<\?(php)?`, CommentPreproc, Push("php")},
			{`[^<]+`, Other, nil},
			{`<`, Other, nil},
		},
	}.Merge(phpCommonRules),
).SetAnalyser(func(text string) float32 {
	if strings.Contains(text, "<?php") {
		return 0.5
	}
	return 0.0
})))
118 vendor/github.com/alecthomas/chroma/lexers/g/gherkin.go generated vendored Normal file
@@ -0,0 +1,118 @@
package g
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
var stepKeywords = `^(\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )`
|
||||
|
||||
var featureKeywords = `^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$`
|
||||
|
||||
var featureElementKeywords = `^(\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$`
|
||||
|
||||
var examplesKeywords = `^(\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$`
|
||||
|
||||
// Gherkin lexer.
|
||||
var Gherkin = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "Gherkin",
|
||||
Aliases: []string{"cucumber", "Cucumber", "gherkin", "Gherkin"},
|
||||
Filenames: []string{"*.feature", "*.FEATURE"},
|
||||
MimeTypes: []string{"text/x-gherkin"},
|
||||
},
|
||||
Rules{
|
||||
"comments": {
|
||||
{`\s*#.*$`, Comment, nil},
|
||||
},
|
||||
"featureElements": {
|
||||
{stepKeywords, Keyword, Push("stepContentStack")},
|
||||
Include("comments"),
|
||||
{`(\s|.)`, NameFunction, nil},
|
||||
},
|
||||
"featureElementsOnStack": {
|
||||
{stepKeywords, Keyword, Pop(2)},
|
||||
Include("comments"),
|
||||
{`(\s|.)`, NameFunction, nil},
|
||||
},
|
||||
"examplesTable": {
|
||||
{`\s+\|`, Keyword, Push("examplesTableHeader")},
|
||||
Include("comments"),
|
||||
{`(\s|.)`, NameFunction, nil},
|
||||
},
|
||||
"examplesTableHeader": {
|
||||
{`\s+\|\s*$`, Keyword, Pop(2)},
|
||||
Include("comments"),
|
||||
{`\\\|`, NameVariable, nil},
|
||||
{`\s*\|`, Keyword, nil},
|
||||
{`[^|]`, NameVariable, nil},
|
||||
},
|
||||
"scenarioSectionsOnStack": {
|
||||
{featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("featureElementsOnStack")},
|
||||
},
|
||||
"narrative": {
|
||||
Include("scenarioSectionsOnStack"),
|
||||
{`(\s|.)`, NameFunction, nil},
|
||||
},
|
||||
"tableVars": {
|
||||
{`(<[^>]+>)`, NameVariable, nil},
|
||||
},
|
||||
"numbers": {
|
||||
{`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralString, nil},
|
||||
},
|
||||
"string": {
|
||||
Include("tableVars"),
|
||||
{`(\s|.)`, LiteralString, nil},
|
||||
},
|
||||
"pyString": {
|
||||
{`"""`, Keyword, Pop(1)},
|
||||
Include("string"),
|
||||
},
|
||||
"stepContentRoot": {
|
||||
{`$`, Keyword, Pop(1)},
|
||||
Include("stepContent"),
|
||||
},
|
||||
"stepContentStack": {
|
||||
{`$`, Keyword, Pop(2)},
|
||||
Include("stepContent"),
|
||||
},
|
||||
"stepContent": {
|
||||
{`"`, NameFunction, Push("doubleString")},
|
||||
Include("tableVars"),
|
||||
Include("numbers"),
|
||||
Include("comments"),
|
||||
{`(\s|.)`, NameFunction, nil},
|
||||
},
|
||||
"tableContent": {
|
||||
{`\s+\|\s*$`, Keyword, Pop(1)},
|
||||
Include("comments"),
|
||||
{`\\\|`, LiteralString, nil},
|
||||
{`\s*\|`, Keyword, nil},
|
||||
{`"`, LiteralString, Push("doubleStringTable")},
|
||||
Include("string"),
|
||||
},
|
||||
"doubleString": {
|
||||
{`"`, NameFunction, Pop(1)},
|
||||
Include("string"),
|
||||
},
|
||||
"doubleStringTable": {
|
||||
{`"`, LiteralString, Pop(1)},
|
||||
Include("string"),
|
||||
},
|
||||
"root": {
|
||||
{`\n`, NameFunction, nil},
|
||||
Include("comments"),
|
||||
{`"""`, Keyword, Push("pyString")},
|
||||
{`\s+\|`, Keyword, Push("tableContent")},
|
||||
{`"`, NameFunction, Push("doubleString")},
|
||||
Include("tableVars"),
|
||||
Include("numbers"),
|
||||
{`(\s*)(@[^@\r\n\t ]+)`, ByGroups(NameFunction, NameTag), nil},
|
||||
{stepKeywords, ByGroups(NameFunction, Keyword), Push("stepContentRoot")},
|
||||
{featureKeywords, ByGroups(Keyword, Keyword, NameFunction), Push("narrative")},
|
||||
{featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("featureElements")},
|
||||
{examplesKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("examplesTable")},
|
||||
{`(\s|.)`, NameFunction, nil},
|
||||
},
|
||||
},
|
||||
))
1 vendor/github.com/alecthomas/chroma/lexers/g/go.go generated vendored
@@ -15,6 +15,7 @@ var Go = internal.Register(MustNewLexer(
Aliases: []string{"go", "golang"},
|
||||
Filenames: []string{"*.go"},
|
||||
MimeTypes: []string{"text/x-gosrc"},
|
||||
EnsureNL: true,
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
4 vendor/github.com/alecthomas/chroma/lexers/h/http.go generated vendored
@@ -19,8 +19,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
},
|
||||
Rules{
|
||||
"root": {
|
||||
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
|
||||
{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
|
||||
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
|
||||
{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
|
||||
},
|
||||
"headers": {
|
||||
{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
|
||||
54 vendor/github.com/alecthomas/chroma/lexers/hlb.go generated vendored Normal file
@@ -0,0 +1,54 @@
package lexers
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// HLB lexer.
|
||||
var HLB = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "HLB",
|
||||
Aliases: []string{"hlb"},
|
||||
Filenames: []string{"*.hlb"},
|
||||
MimeTypes: []string{},
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
{`(#.*)`, ByGroups(CommentSingle), nil},
|
||||
{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
|
||||
{`((\b(true|false)\b))`, ByGroups(NameBuiltin), nil},
|
||||
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
|
||||
{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)(\()`, ByGroups(NameFunction, Punctuation), Push("params")},
|
||||
{`(\{)`, ByGroups(Punctuation), Push("block")},
|
||||
{`(\n|\r|\r\n)`, Text, nil},
|
||||
{`.`, Text, nil},
|
||||
},
|
||||
"string": {
|
||||
{`"`, LiteralString, Pop(1)},
|
||||
{`\\"`, LiteralString, nil},
|
||||
{`[^\\"]+`, LiteralString, nil},
|
||||
},
|
||||
"block": {
|
||||
{`(\})`, ByGroups(Punctuation), Pop(1)},
|
||||
{`(#.*)`, ByGroups(CommentSingle), nil},
|
||||
{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
|
||||
{`((\b(true|false)\b))`, ByGroups(KeywordConstant), nil},
|
||||
{`"`, LiteralString, Push("string")},
|
||||
{`(with)`, ByGroups(KeywordReserved), nil},
|
||||
{`(as)([\t ]+)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(KeywordReserved, Text, NameFunction), nil},
|
||||
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)([\t ]+)(\{)`, ByGroups(KeywordType, Text, Punctuation), Push("block")},
|
||||
{`(?!\b(?:scratch|image|resolve|http|checksum|chmod|filename|git|keepGitDir|local|includePatterns|excludePatterns|followPaths|generate|frontendInput|shell|run|readonlyRootfs|env|dir|user|network|security|host|ssh|secret|mount|target|localPath|uid|gid|mode|readonly|tmpfs|sourcePath|cache|mkdir|createParents|chown|createdTime|mkfile|rm|allowNotFound|allowWildcards|copy|followSymlinks|contentsOnly|unpack|createDestPath)\b)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
|
||||
{`(\n|\r|\r\n)`, Text, nil},
|
||||
{`.`, Text, nil},
|
||||
},
|
||||
"params": {
|
||||
{`(\))`, ByGroups(Punctuation), Pop(1)},
|
||||
{`(variadic)`, ByGroups(Keyword), nil},
|
||||
{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
|
||||
{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
|
||||
{`(\n|\r|\r\n)`, Text, nil},
|
||||
{`.`, Text, nil},
|
||||
},
|
||||
},
|
||||
))
2 vendor/github.com/alecthomas/chroma/lexers/i/ini.go generated vendored
@@ -10,7 +10,7 @@ var Ini = internal.Register(MustNewLexer(
&Config{
|
||||
Name: "INI",
|
||||
Aliases: []string{"ini", "cfg", "dosini"},
|
||||
Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig"},
|
||||
Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"},
|
||||
MimeTypes: []string{"text/x-ini", "text/inf"},
|
||||
},
|
||||
Rules{
|
||||
11 vendor/github.com/alecthomas/chroma/lexers/internal/api.go generated vendored
@@ -37,19 +37,20 @@ func Names(withAliases bool) []string {
|
||||
// Get a Lexer by name, alias or file extension.
|
||||
func Get(name string) chroma.Lexer {
|
||||
candidates := chroma.PrioritisedLexers{}
|
||||
if lexer := Registry.byName[name]; lexer != nil {
|
||||
candidates = append(candidates, lexer)
|
||||
return lexer
|
||||
}
|
||||
if lexer := Registry.byAlias[name]; lexer != nil {
|
||||
candidates = append(candidates, lexer)
|
||||
return lexer
|
||||
}
|
||||
if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil {
|
||||
candidates = append(candidates, lexer)
|
||||
return lexer
|
||||
}
|
||||
if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil {
|
||||
candidates = append(candidates, lexer)
|
||||
return lexer
|
||||
}
|
||||
|
||||
candidates := chroma.PrioritisedLexers{}
|
||||
// Try file extension.
|
||||
if lexer := Match("filename." + name); lexer != nil {
|
||||
candidates = append(candidates, lexer)
|
||||
5 vendor/github.com/alecthomas/chroma/lexers/j/jsx.go generated vendored
@@ -69,8 +69,9 @@ var JSX = internal.Register(MustNewLexer(
Include("root"),
|
||||
},
|
||||
"jsx": {
|
||||
{`(<)([\w]+)`, ByGroups(Punctuation, NameTag), Push("tag")},
|
||||
{`(<)(/)([\w]+)(>)`, ByGroups(Punctuation, Punctuation, NameTag, Punctuation), nil},
|
||||
{`(<)(/?)(>)`, ByGroups(Punctuation, Punctuation, Punctuation), nil},
|
||||
{`(<)([\w\.]+)`, ByGroups(Punctuation, NameTag), Push("tag")},
|
||||
{`(<)(/)([\w\.]+)(>)`, ByGroups(Punctuation, Punctuation, NameTag, Punctuation), nil},
|
||||
},
|
||||
"tag": {
|
||||
{`\s+`, Text, nil},
|
||||
1 vendor/github.com/alecthomas/chroma/lexers/lexers.go generated vendored
@@ -32,6 +32,7 @@ import (
_ "github.com/alecthomas/chroma/lexers/w"
|
||||
_ "github.com/alecthomas/chroma/lexers/x"
|
||||
_ "github.com/alecthomas/chroma/lexers/y"
|
||||
_ "github.com/alecthomas/chroma/lexers/z"
|
||||
)
|
||||
|
||||
// Registry of Lexers.
|
||||
9 vendor/github.com/alecthomas/chroma/lexers/m/markdown.go generated vendored
@@ -2,11 +2,12 @@ package m
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/h"
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// Markdown lexer.
|
||||
var Markdown = internal.Register(MustNewLexer(
|
||||
var Markdown = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
|
||||
&Config{
|
||||
Name: "markdown",
|
||||
Aliases: []string{"md", "mkd"},
|
||||
@@ -40,8 +41,8 @@ var Markdown = internal.Register(MustNewLexer(
|
||||
{"`[^`]+`", LiteralStringBacktick, nil},
|
||||
{`[@#][\w/:]+`, NameEntity, nil},
|
||||
{`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil},
|
||||
{`[^\\\s]+`, Text, nil},
|
||||
{`.|\n`, Text, nil},
|
||||
{`[^\\\s]+`, Other, nil},
|
||||
{`.|\n`, Other, nil},
|
||||
},
|
||||
},
|
||||
))
|
||||
)))
1 vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go generated vendored
@@ -11,6 +11,7 @@ var Plaintext = internal.Register(MustNewLexer(
Aliases: []string{"text", "plain", "no-highlight"},
|
||||
Filenames: []string{"*.txt"},
|
||||
MimeTypes: []string{"text/plain"},
|
||||
Priority: 0.1,
|
||||
},
|
||||
internal.PlaintextRules,
|
||||
))
|
||||
59 vendor/github.com/alecthomas/chroma/lexers/p/pony.go generated vendored Normal file
@@ -0,0 +1,59 @@
package p
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// Pony lexer.
|
||||
var Pony = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "Pony",
|
||||
Aliases: []string{"pony"},
|
||||
Filenames: []string{"*.pony"},
|
||||
MimeTypes: []string{},
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
{`\n`, Text, nil},
|
||||
{`[^\S\n]+`, Text, nil},
|
||||
{`//.*\n`, CommentSingle, nil},
|
||||
{`/\*`, CommentMultiline, Push("nested_comment")},
|
||||
{`"""(?:.|\n)*?"""`, LiteralStringDoc, nil},
|
||||
{`"`, LiteralString, Push("string")},
|
||||
{`\'.*\'`, LiteralStringChar, nil},
|
||||
{`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil},
|
||||
{Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil},
|
||||
{`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil},
|
||||
{Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil},
|
||||
{`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")},
|
||||
{`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")},
|
||||
{Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil},
|
||||
{`_?[A-Z]\w*`, NameClass, nil},
|
||||
{`string\(\)`, NameOther, nil},
|
||||
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil},
|
||||
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
|
||||
{`\d+`, LiteralNumberInteger, nil},
|
||||
{`(true|false)\b`, Keyword, nil},
|
||||
{`_\d*`, Name, nil},
|
||||
{`_?[a-z][\w\'_]*`, Name, nil},
|
||||
},
|
||||
"typename": {
|
||||
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)},
|
||||
},
|
||||
"methodname": {
|
||||
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)},
|
||||
},
|
||||
"nested_comment": {
|
||||
{`[^*/]+`, CommentMultiline, nil},
|
||||
{`/\*`, CommentMultiline, Push()},
|
||||
{`\*/`, CommentMultiline, Pop(1)},
|
||||
{`[*/]`, CommentMultiline, nil},
|
||||
},
|
||||
"string": {
|
||||
{`"`, LiteralString, Pop(1)},
|
||||
{`\\"`, LiteralString, nil},
|
||||
{`[^\\"]+`, LiteralString, nil},
|
||||
},
|
||||
},
|
||||
))
67 vendor/github.com/alecthomas/chroma/lexers/r/reasonml.go generated vendored Normal file
@@ -0,0 +1,67 @@
package r
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// Reasonml lexer.
|
||||
var Reasonml = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "ReasonML",
|
||||
Aliases: []string{"reason", "reasonml"},
|
||||
Filenames: []string{"*.re", "*.rei"},
|
||||
MimeTypes: []string{"text/x-reasonml"},
|
||||
},
|
||||
Rules{
|
||||
"escape-sequence": {
|
||||
{`\\[\\"\'ntbr]`, LiteralStringEscape, nil},
|
||||
{`\\[0-9]{3}`, LiteralStringEscape, nil},
|
||||
{`\\x[0-9a-fA-F]{2}`, LiteralStringEscape, nil},
|
||||
},
|
||||
"root": {
|
||||
{`\s+`, Text, nil},
|
||||
{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil},
|
||||
{`\b([A-Z][\w\']*)(?=\s*\.)`, NameNamespace, Push("dotted")},
|
||||
{`\b([A-Z][\w\']*)`, NameClass, nil},
|
||||
{`//.*?\n`, CommentSingle, nil},
|
||||
{`\/\*(?![\/])`, CommentMultiline, Push("comment")},
|
||||
{`\b(as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|false|for|fun|esfun|function|functor|if|in|include|inherit|initializer|lazy|let|switch|module|pub|mutable|new|nonrec|object|of|open|pri|rec|sig|struct|then|to|true|try|type|val|virtual|when|while|with)\b`, Keyword, nil},
|
||||
{"(~|\\}|\\|]|\\||\\|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\\.\\.\\.|\\.\\.|\\.|=>|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", OperatorWord, nil},
|
||||
{`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil},
|
||||
{`\b(and|asr|land|lor|lsl|lsr|lxor|mod|or)\b`, OperatorWord, nil},
|
||||
{`\b(unit|int|float|bool|string|char|list|array)\b`, KeywordType, nil},
|
||||
{`[^\W\d][\w']*`, Name, nil},
|
||||
{`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil},
|
||||
{`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil},
|
||||
{`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil},
|
||||
{`0[bB][01][01_]*`, LiteralNumberBin, nil},
|
||||
{`\d[\d_]*`, LiteralNumberInteger, nil},
|
||||
{`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil},
|
||||
{`'.'`, LiteralStringChar, nil},
|
||||
{`'`, Keyword, nil},
|
||||
{`"`, LiteralStringDouble, Push("string")},
|
||||
{`[~?][a-z][\w\']*:`, NameVariable, nil},
|
||||
},
|
||||
"comment": {
|
||||
{`[^\/*]+`, CommentMultiline, nil},
|
||||
{`\/\*`, CommentMultiline, Push()},
|
||||
{`\*\/`, CommentMultiline, Pop(1)},
|
||||
{`[\*]`, CommentMultiline, nil},
|
||||
},
|
||||
"string": {
|
||||
{`[^\\"]+`, LiteralStringDouble, nil},
|
||||
Include("escape-sequence"),
|
||||
{`\\\n`, LiteralStringDouble, nil},
|
||||
{`"`, LiteralStringDouble, Pop(1)},
|
||||
},
|
||||
"dotted": {
|
||||
{`\s+`, Text, nil},
|
||||
{`\.`, Punctuation, nil},
|
||||
{`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil},
|
||||
{`[A-Z][\w\']*`, NameClass, Pop(1)},
|
||||
{`[a-z_][\w\']*`, Name, Pop(1)},
|
||||
Default(Pop(1)),
|
||||
},
|
||||
},
|
||||
))
2 vendor/github.com/alecthomas/chroma/lexers/r/rust.go generated vendored
@@ -58,7 +58,7 @@ var Rust = internal.Register(MustNewLexer(
{`'[a-zA-Z_]\w*`, NameAttribute, nil},
|
||||
{`[{}()\[\],.;]`, Punctuation, nil},
|
||||
{`[+\-*/%&|<>^!~@=:?]`, Operator, nil},
|
||||
{`[a-zA-Z_]\w*`, Name, nil},
|
||||
{`(r#)?[a-zA-Z_]\w*`, Name, nil},
|
||||
{`#!?\[`, CommentPreproc, Push("attribute[")},
|
||||
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, TextWhitespace, Punctuation), Push("macro{")},
|
||||
{`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, Punctuation), Push("macro(")},
|
||||
94 vendor/github.com/alecthomas/chroma/lexers/s/sas.go generated vendored Normal file
@@ -0,0 +1,94 @@
package s
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// Sas lexer.
|
||||
var Sas = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "SAS",
|
||||
Aliases: []string{"sas"},
|
||||
Filenames: []string{"*.SAS", "*.sas"},
|
||||
MimeTypes: []string{"text/x-sas", "text/sas", "application/x-sas"},
|
||||
CaseInsensitive: true,
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
Include("comments"),
|
||||
Include("proc-data"),
|
||||
Include("cards-datalines"),
|
||||
Include("logs"),
|
||||
Include("general"),
|
||||
{`.`, Text, nil},
|
||||
{`\\\n`, Text, nil},
|
||||
{`\n`, Text, nil},
|
||||
},
|
||||
"comments": {
|
||||
{`^\s*\*.*?;`, Comment, nil},
|
||||
{`/\*.*?\*/`, Comment, nil},
|
||||
{`^\s*\*(.|\n)*?;`, CommentMultiline, nil},
|
||||
{`/[*](.|\n)*?[*]/`, CommentMultiline, nil},
|
||||
},
|
||||
"proc-data": {
|
||||
{`(^|;)\s*(proc \w+|data|run|quit)[\s;]`, KeywordReserved, nil},
|
||||
},
|
||||
"cards-datalines": {
|
||||
{`^\s*(datalines|cards)\s*;\s*$`, Keyword, Push("data")},
|
||||
},
|
||||
"data": {
|
||||
{`(.|\n)*^\s*;\s*$`, Other, Pop(1)},
|
||||
},
|
||||
"logs": {
|
||||
{`\n?^\s*%?put `, Keyword, Push("log-messages")},
|
||||
},
|
||||
"log-messages": {
|
||||
{`NOTE(:|-).*`, Generic, Pop(1)},
|
||||
{`WARNING(:|-).*`, GenericEmph, Pop(1)},
|
||||
{`ERROR(:|-).*`, GenericError, Pop(1)},
|
||||
Include("general"),
|
||||
},
|
||||
"general": {
|
||||
Include("keywords"),
|
||||
Include("vars-strings"),
|
||||
Include("special"),
|
||||
Include("numbers"),
|
||||
},
|
||||
"keywords": {
|
||||
{Words(`\b`, `\b`, `abort`, `array`, `attrib`, `by`, `call`, `cards`, `cards4`, `catname`, `continue`, `datalines`, `datalines4`, `delete`, `delim`, `delimiter`, `display`, `dm`, `drop`, `endsas`, `error`, `file`, `filename`, `footnote`, `format`, `goto`, `in`, `infile`, `informat`, `input`, `keep`, `label`, `leave`, `length`, `libname`, `link`, `list`, `lostcard`, `merge`, `missing`, `modify`, `options`, `output`, `out`, `page`, `put`, `redirect`, `remove`, `rename`, `replace`, `retain`, `return`, `select`, `set`, `skip`, `startsas`, `stop`, `title`, `update`, `waitsas`, `where`, `window`, `x`, `systask`), Keyword, nil},
|
||||
{Words(`\b`, `\b`, `add`, `and`, `alter`, `as`, `cascade`, `check`, `create`, `delete`, `describe`, `distinct`, `drop`, `foreign`, `from`, `group`, `having`, `index`, `insert`, `into`, `in`, `key`, `like`, `message`, `modify`, `msgtype`, `not`, `null`, `on`, `or`, `order`, `primary`, `references`, `reset`, `restrict`, `select`, `set`, `table`, `unique`, `update`, `validate`, `view`, `where`), Keyword, nil},
|
||||
{Words(`\b`, `\b`, `do`, `if`, `then`, `else`, `end`, `until`, `while`), Keyword, nil},
|
||||
{Words(`%`, `\b`, `bquote`, `nrbquote`, `cmpres`, `qcmpres`, `compstor`, `datatyp`, `display`, `do`, `else`, `end`, `eval`, `global`, `goto`, `if`, `index`, `input`, `keydef`, `label`, `left`, `length`, `let`, `local`, `lowcase`, `macro`, `mend`, `nrquote`, `nrstr`, `put`, `qleft`, `qlowcase`, `qscan`, `qsubstr`, `qsysfunc`, `qtrim`, `quote`, `qupcase`, `scan`, `str`, `substr`, `superq`, `syscall`, `sysevalf`, `sysexec`, `sysfunc`, `sysget`, `syslput`, `sysprod`, `sysrc`, `sysrput`, `then`, `to`, `trim`, `unquote`, `until`, `upcase`, `verify`, `while`, `window`), NameBuiltin, nil},
|
||||
{Words(`\b`, `\(`, `abs`, `addr`, `airy`, `arcos`, `arsin`, `atan`, `attrc`, `attrn`, `band`, `betainv`, `blshift`, `bnot`, `bor`, `brshift`, `bxor`, `byte`, `cdf`, `ceil`, `cexist`, `cinv`, `close`, `cnonct`, `collate`, `compbl`, `compound`, `compress`, `cos`, `cosh`, `css`, `curobs`, `cv`, `daccdb`, `daccdbsl`, `daccsl`, `daccsyd`, `dacctab`, `dairy`, `date`, `datejul`, `datepart`, `datetime`, `day`, `dclose`, `depdb`, `depdbsl`, `depsl`, `depsyd`, `deptab`, `dequote`, `dhms`, `dif`, `digamma`, `dim`, `dinfo`, `dnum`, `dopen`, `doptname`, `doptnum`, `dread`, `dropnote`, `dsname`, `erf`, `erfc`, `exist`, `exp`, `fappend`, `fclose`, `fcol`, `fdelete`, `fetch`, `fetchobs`, `fexist`, `fget`, `fileexist`, `filename`, `fileref`, `finfo`, `finv`, `fipname`, `fipnamel`, `fipstate`, `floor`, `fnonct`, `fnote`, `fopen`, `foptname`, `foptnum`, `fpoint`, `fpos`, `fput`, `fread`, `frewind`, `frlen`, `fsep`, `fuzz`, `fwrite`, `gaminv`, `gamma`, `getoption`, `getvarc`, `getvarn`, `hbound`, `hms`, `hosthelp`, `hour`, `ibessel`, `index`, `indexc`, `indexw`, `input`, `inputc`, `inputn`, `int`, `intck`, `intnx`, `intrr`, `irr`, `jbessel`, `juldate`, `kurtosis`, `lag`, `lbound`, `left`, `length`, `lgamma`, `libname`, `libref`, `log`, `log10`, `log2`, `logpdf`, `logpmf`, `logsdf`, `lowcase`, `max`, `mdy`, `mean`, `min`, `minute`, `mod`, `month`, `mopen`, `mort`, `n`, `netpv`, `nmiss`, `normal`, `note`, `npv`, `open`, `ordinal`, `pathname`, `pdf`, `peek`, `peekc`, `pmf`, `point`, `poisson`, `poke`, `probbeta`, `probbnml`, `probchi`, `probf`, `probgam`, `probhypr`, `probit`, `probnegb`, `probnorm`, `probt`, `put`, `putc`, `putn`, `qtr`, `quote`, `ranbin`, `rancau`, `ranexp`, `rangam`, `range`, `rank`, `rannor`, `ranpoi`, `rantbl`, `rantri`, `ranuni`, `repeat`, `resolve`, `reverse`, `rewind`, `right`, `round`, `saving`, `scan`, `sdf`, `second`, `sign`, `sin`, `sinh`, `skewness`, `soundex`, `spedis`, `sqrt`, `std`, `stderr`, `stfips`, `stname`, `stnamel`, `substr`, `sum`, `symget`, `sysget`, `sysmsg`, `sysprod`, `sysrc`, `system`, `tan`, `tanh`, `time`, `timepart`, `tinv`, `tnonct`, `today`, `translate`, `tranwrd`, `trigamma`, `trim`, `trimn`, `trunc`, `uniform`, `upcase`, `uss`, `var`, `varfmt`, `varinfmt`, `varlabel`, `varlen`, `varname`, `varnum`, `varray`, `varrayx`, `vartype`, `verify`, `vformat`, `vformatd`, `vformatdx`, `vformatn`, `vformatnx`, `vformatw`, `vformatwx`, `vformatx`, `vinarray`, `vinarrayx`, `vinformat`, `vinformatd`, `vinformatdx`, `vinformatn`, `vinformatnx`, `vinformatw`, `vinformatwx`, `vinformatx`, `vlabel`, `vlabelx`, `vlength`, `vlengthx`, `vname`, `vnamex`, `vtype`, `vtypex`, `weekday`, `year`, `yyq`, `zipfips`, `zipname`, `zipnamel`, `zipstate`), NameBuiltin, nil},
|
||||
},
|
||||
"vars-strings": {
|
||||
{`&[a-z_]\w{0,31}\.?`, NameVariable, nil},
|
||||
{`%[a-z_]\w{0,31}`, NameFunction, nil},
|
||||
{`\'`, LiteralString, Push("string_squote")},
|
||||
{`"`, LiteralString, Push("string_dquote")},
|
||||
},
|
||||
"string_squote": {
|
||||
{`'`, LiteralString, Pop(1)},
|
||||
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
|
||||
{`[^$\'\\]+`, LiteralString, nil},
|
||||
{`[$\'\\]`, LiteralString, nil},
|
||||
},
|
||||
"string_dquote": {
|
||||
{`"`, LiteralString, Pop(1)},
|
||||
{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
|
||||
{`&`, NameVariable, Push("validvar")},
|
||||
{`[^$&"\\]+`, LiteralString, nil},
|
||||
{`[$"\\]`, LiteralString, nil},
|
||||
},
|
||||
"validvar": {
|
||||
{`[a-z_]\w{0,31}\.?`, NameVariable, Pop(1)},
|
||||
},
|
||||
"numbers": {
|
||||
{`\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b`, LiteralNumber, nil},
|
||||
},
|
||||
"special": {
|
||||
{`(null|missing|_all_|_automatic_|_character_|_n_|_infile_|_name_|_null_|_numeric_|_user_|_webout_)`, KeywordConstant, nil},
|
||||
},
|
||||
},
|
||||
))
81 vendor/github.com/alecthomas/chroma/lexers/t/terraform.go generated vendored
@@ -15,55 +15,46 @@ var Terraform = internal.Register(MustNewLexer(
},
|
||||
Rules{
|
||||
"root": {
|
||||
Include("string"),
|
||||
Include("punctuation"),
|
||||
Include("curly"),
|
||||
Include("basic"),
|
||||
Include("whitespace"),
|
||||
{`[0-9]+`, LiteralNumber, nil},
|
||||
{`[\[\](),.{}]`, Punctuation, nil},
|
||||
{`-?[0-9]+`, LiteralNumber, nil},
|
||||
{`=>`, Punctuation, nil},
|
||||
{Words(``, `\b`, `true`, `false`), KeywordConstant, nil},
|
||||
{`/(?s)\*(((?!\*/).)*)\*/`, CommentMultiline, nil},
|
||||
{`\s*(#|//).*\n`, CommentSingle, nil},
|
||||
{`([a-zA-Z]\w*)(\s*)(=(?!>))`, ByGroups(NameAttribute, Text, Text), nil},
|
||||
{Words(`^\s*`, `\b`, `variable`, `data`, `resource`, `provider`, `provisioner`, `module`, `output`), KeywordReserved, nil},
|
||||
{Words(``, `\b`, `for`, `in`), Keyword, nil},
|
||||
{Words(``, ``, `count`, `data`, `var`, `module`, `each`), NameBuiltin, nil},
|
||||
{Words(``, `\b`, `abs`, `ceil`, `floor`, `log`, `max`, `min`, `parseint`, `pow`, `signum`), NameBuiltin, nil},
|
||||
{Words(``, `\b`, `chomp`, `format`, `formatlist`, `indent`, `join`, `lower`, `regex`, `regexall`, `replace`, `split`, `strrev`, `substr`, `title`, `trim`, `trimprefix`, `trimsuffix`, `trimspace`, `upper`), NameBuiltin, nil},
|
||||
{Words(`[^.]`, `\b`, `chunklist`, `coalesce`, `coalescelist`, `compact`, `concat`, `contains`, `distinct`, `element`, `flatten`, `index`, `keys`, `length`, `list`, `lookup`, `map`, `matchkeys`, `merge`, `range`, `reverse`, `setintersection`, `setproduct`, `setsubtract`, `setunion`, `slice`, `sort`, `transpose`, `values`, `zipmap`), NameBuiltin, nil},
|
||||
{Words(`[^.]`, `\b`, `base64decode`, `base64encode`, `base64gzip`, `csvdecode`, `jsondecode`, `jsonencode`, `urlencode`, `yamldecode`, `yamlencode`), NameBuiltin, nil},
|
||||
{Words(``, `\b`, `abspath`, `dirname`, `pathexpand`, `basename`, `file`, `fileexists`, `fileset`, `filebase64`, `templatefile`), NameBuiltin, nil},
|
||||
{Words(``, `\b`, `formatdate`, `timeadd`, `timestamp`), NameBuiltin, nil},
|
||||
{Words(``, `\b`, `base64sha256`, `base64sha512`, `bcrypt`, `filebase64sha256`, `filebase64sha512`, `filemd5`, `filesha1`, `filesha256`, `filesha512`, `md5`, `rsadecrypt`, `sha1`, `sha256`, `sha512`, `uuid`, `uuidv5`), NameBuiltin, nil},
|
||||
{Words(``, `\b`, `cidrhost`, `cidrnetmask`, `cidrsubnet`), NameBuiltin, nil},
|
||||
{Words(``, `\b`, `can`, `tobool`, `tolist`, `tomap`, `tonumber`, `toset`, `tostring`, `try`), NameBuiltin, nil},
|
||||
{`=(?!>)|\+|-|\*|\/|:|!|%|>|<(?!<)|>=|<=|==|!=|&&|\||\?`, Operator, nil},
|
||||
{`\n|\s+|\\\n`, Text, nil},
|
||||
{`[a-zA-Z]\w*`, NameOther, nil},
|
||||
{`"`, LiteralStringDouble, Push("string")},
|
||||
{`(?s)(<<-?)(\w+)(\n\s*(?:(?!\2).)*\s*\n\s*)(\2)`, ByGroups(Operator, Operator, String, Operator), nil},
|
||||
},
|
||||
"basic": {
|
||||
{Words(`\b`, `\b`, `true`, `false`), KeywordType, nil},
|
||||
{`\s*/\*`, CommentMultiline, Push("comment")},
|
||||
{`\s*#.*\n`, CommentSingle, nil},
|
||||
{`(.*?)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil},
|
||||
{Words(`\b`, `\b`, `variable`, `resource`, `provider`, `provisioner`, `module`), KeywordReserved, Push("function")},
|
||||
{Words(`\b`, `\b`, `ingress`, `egress`, `listener`, `default`, `connection`, `alias`), KeywordDeclaration, nil},
|
||||
{`\$\{`, LiteralStringInterpol, Push("var_builtin")},
|
||||
},
|
||||
"function": {
|
||||
{`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil},
|
||||
Include("punctuation"),
|
||||
Include("curly"),
|
||||
},
|
||||
"var_builtin": {
|
||||
{`\$\{`, LiteralStringInterpol, Push()},
|
||||
{Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil},
|
||||
Include("string"),
|
||||
Include("punctuation"),
|
||||
{`\s+`, Text, nil},
|
||||
{`\}`, LiteralStringInterpol, Pop(1)},
|
||||
"declaration": {
|
||||
{`(\s*)("(?:\\\\|\\"|[^"])*")(\s*)`, ByGroups(Text, NameVariable, Text), nil},
|
||||
{`\{`, Punctuation, Pop(1)},
|
||||
},
|
||||
"string": {
|
||||
{`(".*")`, ByGroups(LiteralStringDouble), nil},
|
||||
{`"`, LiteralStringDouble, Pop(1)},
|
||||
{`\\\\`, LiteralStringDouble, nil},
|
||||
{`\\\\"`, LiteralStringDouble, nil},
|
||||
{`\$\{`, LiteralStringInterpol, Push("interp-inside")},
|
||||
{`\$`, LiteralStringDouble, nil},
|
||||
{`[^"\\\\$]+`, LiteralStringDouble, nil},
|
||||
},
|
||||
"punctuation": {
|
||||
{`[\[\](),.]`, Punctuation, nil},
|
||||
},
|
||||
"curly": {
|
||||
{`\{`, TextPunctuation, nil},
|
||||
{`\}`, TextPunctuation, nil},
|
||||
},
|
||||
"comment": {
|
||||
{`[^*/]`, CommentMultiline, nil},
|
||||
{`/\*`, CommentMultiline, Push()},
|
||||
{`\*/`, CommentMultiline, Pop(1)},
|
||||
{`[*/]`, CommentMultiline, nil},
|
||||
},
|
||||
"whitespace": {
|
||||
{`\n`, Text, nil},
|
||||
{`\s+`, Text, nil},
|
||||
{`\\\n`, Text, nil},
|
||||
"interp-inside": {
|
||||
{`\}`, LiteralStringInterpol, Pop(1)},
|
||||
Include("root"),
|
||||
},
|
||||
},
|
||||
))
|
||||
2 vendor/github.com/alecthomas/chroma/lexers/t/toml.go generated vendored
@@ -22,7 +22,7 @@ var TOML = internal.Register(MustNewLexer(
{`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil},
|
||||
{`"(\\\\|\\"|[^"])*"`, StringDouble, nil},
|
||||
{`'(\\\\|\\'|[^'])*'`, StringSingle, nil},
|
||||
{`[.,=\[\]]`, Punctuation, nil},
|
||||
{`[.,=\[\]{}]`, Punctuation, nil},
|
||||
{`[^\W\d]\w*`, NameOther, nil},
|
||||
},
|
||||
},
|
||||
36 vendor/github.com/alecthomas/chroma/lexers/t/typescript.go generated vendored
@@ -9,7 +9,7 @@ import (
var TypeScript = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "TypeScript",
|
||||
Aliases: []string{"ts", "typescript"},
|
||||
Aliases: []string{"ts", "tsx", "typescript"},
|
||||
Filenames: []string{"*.ts", "*.tsx"},
|
||||
MimeTypes: []string{"text/x-typescript"},
|
||||
DotAll: true,
|
||||
@@ -32,19 +32,20 @@ var TypeScript = internal.Register(MustNewLexer(
|
||||
{`\n`, Text, Pop(1)},
|
||||
},
|
||||
"root": {
|
||||
Include("jsx"),
|
||||
{`^(?=\s|/|<!--)`, Text, Push("slashstartsregex")},
|
||||
Include("commentsandwhitespace"),
|
||||
{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
|
||||
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
|
||||
{`[})\].]`, Punctuation, nil},
|
||||
{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")},
|
||||
{`(for|in|of|while|do|break|return|yield|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|keyof|asserts|is|infer|await|void|this)\b`, Keyword, Push("slashstartsregex")},
|
||||
{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
|
||||
{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil},
|
||||
{`(abstract|async|boolean|class|const|debugger|enum|export|extends|from|get|global|goto|implements|import|interface|namespace|package|private|protected|public|readonly|require|set|static|super|type)\b`, KeywordReserved, nil},
|
||||
{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
|
||||
{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
|
||||
{`(Array|Boolean|Date|Error|Function|Math|Number|Object|Packages|RegExp|String|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
|
||||
{`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")},
|
||||
{`\b(string|bool|number)\b`, KeywordType, nil},
|
||||
{`\b(constructor|declare|interface|as|AS)\b`, KeywordReserved, nil},
|
||||
{`\b(string|bool|number|any|never|object|symbol|unique|unknown|bigint)\b`, KeywordType, nil},
|
||||
{`\b(constructor|declare|interface|as)\b`, KeywordReserved, nil},
|
||||
{`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")},
|
||||
{`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")},
|
||||
{`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil},
|
||||
@@ -69,5 +70,28 @@ var TypeScript = internal.Register(MustNewLexer(
|
||||
{`\}`, LiteralStringInterpol, Pop(1)},
|
||||
Include("root"),
|
||||
},
|
||||
"jsx": {
|
||||
{`(<)(/?)(>)`, ByGroups(Punctuation, Punctuation, Punctuation), nil},
|
||||
{`(<)([\w\.]+)`, ByGroups(Punctuation, NameTag), Push("tag")},
|
||||
{`(<)(/)([\w\.]*)(>)`, ByGroups(Punctuation, Punctuation, NameTag, Punctuation), nil},
|
||||
},
|
||||
"tag": {
|
||||
{`\s+`, Text, nil},
|
||||
{`([\w]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")},
|
||||
{`[{}]+`, Punctuation, nil},
|
||||
{`[\w\.]+`, NameAttribute, nil},
|
||||
{`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)},
|
||||
},
|
||||
"attr": {
|
||||
{`{`, Punctuation, Push("expression")},
|
||||
{`".*?"`, LiteralString, Pop(1)},
|
||||
{`'.*?'`, LiteralString, Pop(1)},
|
||||
Default(Pop(1)),
|
||||
},
|
||||
"expression": {
|
||||
{`{`, Punctuation, Push()},
|
||||
{`}`, Punctuation, Pop(1)},
|
||||
Include("root"),
|
||||
},
|
||||
},
|
||||
))
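With the `tsx` alias and the `jsx`/`tag`/`attr` states added above, TSX sources can be resolved and tokenised through the regular chroma entry points. A minimal sketch (assuming the chroma version vendored in this diff):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// "tsx" now resolves to the TypeScript lexer via the alias added above.
	lexer := lexers.Get("tsx")

	it, err := lexer.Tokenise(nil, `const App = () => <div className="x"/>;`+"\n")
	if err != nil {
		panic(err)
	}
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-25s %q\n", tok.Type, tok.Value)
	}
}
```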
3 vendor/github.com/alecthomas/chroma/lexers/t/typoscript.go generated vendored
@@ -10,9 +10,10 @@ var Typoscript = internal.Register(MustNewLexer(
&Config{
|
||||
Name: "TypoScript",
|
||||
Aliases: []string{"typoscript"},
|
||||
Filenames: []string{"*.ts", "*.txt"},
|
||||
Filenames: []string{"*.ts"},
|
||||
MimeTypes: []string{"text/x-typoscript"},
|
||||
DotAll: true,
|
||||
Priority: 0.1,
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
19 vendor/github.com/alecthomas/chroma/lexers/y/yaml.go generated vendored
@@ -15,32 +15,35 @@ var YAML = internal.Register(MustNewLexer(
Rules{
|
||||
"root": {
|
||||
Include("whitespace"),
|
||||
{`^---`, Text, nil},
|
||||
{`^---`, NameNamespace, nil},
|
||||
{`^\.\.\.`, NameNamespace, nil},
|
||||
{`[\n?]?\s*- `, Text, nil},
|
||||
{`#.*$`, Comment, nil},
|
||||
{`!![^\s]+`, CommentPreproc, nil},
|
||||
{`&[^\s]+`, CommentPreproc, nil},
|
||||
{`\*[^\s]+`, CommentPreproc, nil},
|
||||
{`^%include\s+[^\n\r]+`, CommentPreproc, nil},
|
||||
{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
|
||||
{`[>|](?:[+-])?\s(?:^(?:[ \n]{1})+.*\n?)*$`, StringDoc, nil},
|
||||
Include("key"),
|
||||
Include("value"),
|
||||
{`[?:,\[\]]`, Punctuation, nil},
|
||||
{`.`, Text, nil},
|
||||
},
|
||||
"value": {
|
||||
{Words(``, `\b`, "true", "false", "null"), KeywordConstant, nil},
|
||||
{Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null",
|
||||
"y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO",
|
||||
"on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil},
|
||||
{`"(?:\\.|[^"])*"`, StringDouble, nil},
|
||||
{`'(?:\\.|[^'])*'`, StringSingle, nil},
|
||||
{`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil},
|
||||
{`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil},
|
||||
{`\b[\w]+\b`, Text, nil},
|
||||
{`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil},
|
||||
},
|
||||
"key": {
|
||||
{`"[^"\n].*": `, Keyword, nil},
|
||||
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, Keyword, Punctuation, Whitespace), nil},
|
||||
{`([^"\n{]*)(:)( )`, ByGroups(Keyword, Punctuation, Whitespace), nil},
|
||||
{`([^"\n{]*)(:)(\n)`, ByGroups(Keyword, Punctuation, Whitespace), nil},
|
||||
{`"[^"\n].*": `, NameTag, nil},
|
||||
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil},
|
||||
{`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil},
|
||||
{`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil},
|
||||
},
|
||||
"whitespace": {
|
||||
{`\s+`, Whitespace, nil},
|
||||
67 vendor/github.com/alecthomas/chroma/lexers/y/yang.go generated vendored Normal file
@@ -0,0 +1,67 @@
package y
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
var YANG = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "YANG",
|
||||
Aliases: []string{"yang"},
|
||||
Filenames: []string{"*.yang"},
|
||||
MimeTypes: []string{"application/yang"},
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
{`\s+`, Whitespace, nil},
|
||||
{`[\{\}\;]+`, Punctuation, nil},
|
||||
{`(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])`, Operator, nil},
|
||||
|
||||
{`"(?:\\"|[^"])*?"`, StringDouble, nil},
|
||||
{`'(?:\\'|[^'])*?'`, StringSingle, nil},
|
||||
|
||||
{`/\*`, CommentMultiline, Push("comments")},
|
||||
{`//.*?$`, CommentSingle, nil},
|
||||
|
||||
//match BNF stmt for `node-identifier` with [ prefix ":"]
|
||||
{`(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])`, ByGroups(KeywordNamespace, Punctuation, Text), nil},
|
||||
|
||||
//match BNF stmt `date-arg-str`
|
||||
{`([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s\{\}\;])`, LiteralDate, nil},
|
||||
{`([0-9]+\.[0-9]+)(?=[\s\{\}\;])`, NumberFloat, nil},
|
||||
{`([0-9]+)(?=[\s\{\}\;])`, NumberInteger, nil},
|
||||
|
||||
//TOP_STMTS_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `module`, `submodule`), Keyword, nil},
|
||||
//MODULE_HEADER_STMT_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `belongs-to`, `namespace`, `prefix`, `yang-version`), Keyword, nil},
|
||||
//META_STMT_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `contact`, `description`, `organization`, `reference`, `revision`), Keyword, nil},
|
||||
//LINKAGE_STMTS_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `import`, `include`, `revision-date`), Keyword, nil},
|
||||
//BODY_STMT_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `action`, `argument`, `augment`, `deviation`, `extension`, `feature`, `grouping`, `identity`, `if-feature`, `input`, `notification`, `output`, `rpc`, `typedef`), Keyword, nil},
|
||||
//DATA_DEF_STMT_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `anydata`, `anyxml`, `case`, `choice`, `config`, `container`, `deviate`, `leaf`, `leaf-list`, `list`, `must`, `presence`, `refine`, `uses`, `when`), Keyword, nil},
|
||||
//TYPE_STMT_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `base`, `bit`, `default`, `enum`, `error-app-tag`, `error-message`, `fraction-digits`, `length`, `max-elements`, `min-elements`, `modifier`, `ordered-by`, `path`, `pattern`, `position`, `range`, `require-instance`, `status`, `type`, `units`, `value`, `yin-element`), Keyword, nil},
|
||||
//LIST_STMT_KEYWORDS
|
||||
{Words(``, `(?=[^\w\-\:])`, `key`, `mandatory`, `unique`), Keyword, nil},
|
||||
|
||||
//CONSTANTS_KEYWORDS - RFC7950 other keywords
|
||||
{Words(``, `(?=[^\w\-\:])`, `add`, `current`, `delete`, `deprecated`, `false`, `invert-match`, `max`, `min`, `not-supported`, `obsolete`, `replace`, `true`, `unbounded`, `user`), NameClass, nil},
|
||||
|
||||
//RFC7950 Built-In Types
|
||||
{Words(``, `(?=[^\w\-\:])`, `binary`, `bits`, `boolean`, `decimal64`, `empty`, `enumeration`, `identityref`, `instance-identifier`, `int16`, `int32`, `int64`, `int8`, `leafref`, `string`, `uint16`, `uint32`, `uint64`, `uint8`, `union`), NameClass, nil},
|
||||
|
||||
{`[^;{}\s\'\"]+`, Text, nil},
|
||||
},
|
||||
"comments": {
|
||||
{`[^*/]`, CommentMultiline, nil},
|
||||
{`/\*`, CommentMultiline, Push("comment")},
|
||||
{`\*/`, CommentMultiline, Pop(1)},
|
||||
{`[*/]`, CommentMultiline, nil},
|
||||
},
|
||||
},
|
||||
))
54 vendor/github.com/alecthomas/chroma/lexers/z/zig.go generated vendored Normal file
@@ -0,0 +1,54 @@
package z
|
||||
|
||||
import (
|
||||
. "github.com/alecthomas/chroma" // nolint
|
||||
"github.com/alecthomas/chroma/lexers/internal"
|
||||
)
|
||||
|
||||
// Zig lexer.
|
||||
var Zig = internal.Register(MustNewLexer(
|
||||
&Config{
|
||||
Name: "Zig",
|
||||
Aliases: []string{"zig"},
|
||||
Filenames: []string{"*.zig"},
|
||||
MimeTypes: []string{"text/zig"},
|
||||
},
|
||||
Rules{
|
||||
"root": {
|
||||
{`\n`, TextWhitespace, nil},
|
||||
{`\s+`, TextWhitespace, nil},
|
||||
{`//.*?\n`, CommentSingle, nil},
|
||||
{Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil},
|
||||
{Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil},
|
||||
{Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil},
|
||||
{Words(``, `\b`, `while`, `for`), Keyword, nil},
|
||||
{Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil},
|
||||
{Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil},
|
||||
{Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil},
|
||||
{Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil},
|
||||
{`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil},
|
||||
{`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil},
|
||||
{`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
|
||||
{`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil},
|
||||
{`0b[01]+`, LiteralNumberBin, nil},
|
||||
{`0o[0-7]+`, LiteralNumberOct, nil},
|
||||
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
|
||||
{`[0-9]+`, LiteralNumberInteger, nil},
|
||||
{`@[a-zA-Z_]\w*`, NameBuiltin, nil},
|
||||
{`[a-zA-Z_]\w*`, Name, nil},
|
||||
{`\'\\\'\'`, LiteralStringEscape, nil},
|
||||
{`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil},
|
||||
{`\'[^\\\']\'`, LiteralString, nil},
|
||||
{`\\\\[^\n]*`, LiteralStringHeredoc, nil},
|
||||
{`c\\\\[^\n]*`, LiteralStringHeredoc, nil},
|
||||
{`c?"`, LiteralString, Push("string")},
|
||||
{`[+%=><|^!?/\-*&~:]`, Operator, nil},
|
||||
{`[{}()\[\],.;]`, Punctuation, nil},
|
||||
},
|
||||
"string": {
|
||||
{`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil},
|
||||
{`[^\\"\n]+`, LiteralString, nil},
|
||||
{`"`, LiteralString, Pop(1)},
|
||||
},
|
||||
},
|
||||
))
41 vendor/github.com/alecthomas/chroma/regexp.go generated vendored
@@ -6,6 +6,7 @@ import (
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/dlclark/regexp2"
|
||||
@@ -160,6 +161,14 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
|
||||
// Rules maps from state to a sequence of Rules.
|
||||
type Rules map[string][]Rule
|
||||
|
||||
// Rename clones rules then renames a rule.
|
||||
func (r Rules) Rename(old, new string) Rules {
|
||||
r = r.Clone()
|
||||
r[new] = r[old]
|
||||
delete(r, old)
|
||||
return r
|
||||
}
|
||||
|
||||
// Clone returns a clone of the Rules.
|
||||
func (r Rules) Clone() Rules {
|
||||
out := map[string][]Rule{}
|
||||
@@ -170,6 +179,15 @@ func (r Rules) Clone() Rules {
|
||||
return out
|
||||
}
|
||||
|
||||
// Merge creates a clone of "r" then merges "rules" into the clone.
|
||||
func (r Rules) Merge(rules Rules) Rules {
|
||||
out := r.Clone()
|
||||
for k, v := range rules.Clone() {
|
||||
out[k] = v
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// MustNewLexer creates a new Lexer or panics.
|
||||
func MustNewLexer(config *Config, rules Rules) *RegexLexer {
|
||||
lexer, err := NewLexer(config, rules)
|
||||
@@ -376,6 +394,7 @@ func (r *RegexLexer) maybeCompile() (err error) {
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
|
||||
}
|
||||
rule.Regexp.MatchTimeout = time.Millisecond * 250
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -410,6 +429,9 @@ func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator,
|
||||
if options == nil {
|
||||
options = defaultOptions
|
||||
}
|
||||
if options.EnsureLF {
|
||||
text = ensureLF(text)
|
||||
}
|
||||
if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") {
|
||||
text += "\n"
|
||||
}
|
||||
@@ -437,3 +459,22 @@ func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule
|
||||
}
|
||||
return 0, &CompiledRule{}, nil
|
||||
}
|
||||
|
||||
// replace \r and \r\n with \n
|
||||
// same as strings.ReplaceAll but more efficient
|
||||
func ensureLF(text string) string {
|
||||
buf := make([]byte, len(text))
|
||||
var j int
|
||||
for i := 0; i < len(text); i++ {
|
||||
c := text[i]
|
||||
if c == '\r' {
|
||||
if i < len(text)-1 && text[i+1] == '\n' {
|
||||
continue
|
||||
}
|
||||
c = '\n'
|
||||
}
|
||||
buf[j] = c
|
||||
j++
|
||||
}
|
||||
return string(buf[:j])
|
||||
}
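A test-style sketch of the behaviour documented above (the test name is hypothetical; `ensureLF` is unexported, so this would live alongside regexp.go in the chroma package): `\r\n` collapses to `\n` and a bare `\r` becomes `\n`.

```go
package chroma

import "testing"

// Hypothetical test illustrating ensureLF's line-ending normalisation.
func TestEnsureLFSketch(t *testing.T) {
	if got := ensureLF("a\r\nb\rc\n"); got != "a\nb\nc\n" {
		t.Fatalf("unexpected result: %q", got)
	}
}
```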
BIN vendor/github.com/dlclark/regexp2/.DS_Store generated vendored
Binary file not shown.
4 vendor/github.com/dlclark/regexp2/.gitignore generated vendored
@@ -22,4 +22,6 @@ _testmain.go
*.exe
|
||||
*.test
|
||||
*.prof
|
||||
*.out
|
||||
*.out
|
||||
|
||||
.DS_Store
|
||||
15 vendor/github.com/dlclark/regexp2/README.md generated vendored
@@ -57,6 +57,21 @@ The __last__ capture is embedded in each group, so `g.String()` will return the
| named ascii character class `[[:foo:]]`| yes | no |
|
||||
| conditionals `((expr)yes\|no)` | no | yes |
|
||||
|
||||
## RE2 compatibility mode
|
||||
The default behavior of `regexp2` is to match the .NET regexp engine, however the `RE2` option is provided to change the parsing to increase compatibility with RE2. Using the `RE2` option when compiling a regexp will not take away any features, but will change the following behaviors:
|
||||
* add support for named ascii character classes (e.g. `[[:foo:]]`)
|
||||
* add support for python-style capture groups (e.g. `(P<name>re)`)
|
||||
|
||||
```go
|
||||
re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2)
|
||||
if isMatch, _ := re.MatchString(`Something to match`); isMatch {
|
||||
//do something
|
||||
}
|
||||
```
|
||||
|
||||
This feature is a work in progress and I'm open to ideas for more things to put here (maybe more relaxed character escaping rules?).
|
||||
|
||||
|
||||
## Library features that I'm still working on
|
||||
- Regex split
|
||||
|
||||
|
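Note: the README section above introduces the new RE2 compatibility mode. Below is a short usage sketch of the two behaviours it lists; it assumes the regexp2 API referenced in this diff (MustCompile, FindStringMatch, GroupByName, MatchString) and is illustrative rather than code from this repository.

```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	// Python-style capture groups (?P<name>...) are accepted when the RE2 option is set.
	re := regexp2.MustCompile(`(?P<year>\d{4})-(?P<month>\d{2})`, regexp2.RE2)

	m, err := re.FindStringMatch("released 2020-03")
	if err != nil || m == nil {
		panic("no match")
	}
	fmt.Println(m.GroupByName("year").String()) // 2020

	// Named ASCII character classes like [[:alpha:]] are likewise RE2-only.
	word := regexp2.MustCompile(`[[:alpha:]]+`, regexp2.RE2)
	if ok, _ := word.MatchString("cheat"); ok {
		fmt.Println("matched")
	}
}
```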
1
vendor/github.com/dlclark/regexp2/regexp.go
generated
vendored
@@ -120,6 +120,7 @@ const (
|
||||
RightToLeft = 0x0040 // "r"
|
||||
Debug = 0x0080 // "d"
|
||||
ECMAScript = 0x0100 // "e"
|
||||
RE2 = 0x0200 // RE2 (regexp package) compatibility mode
|
||||
)
|
||||
|
||||
func (re *Regexp) RightToLeft() bool {
|
||||
|
70
vendor/github.com/dlclark/regexp2/syntax/charclass.go
generated
vendored
@@ -484,6 +484,29 @@ func (c *CharSet) addRanges(ranges []singleRange) {
|
||||
c.canonicalize()
|
||||
}
|
||||
|
||||
// Merges everything but the new ranges into our own
|
||||
func (c *CharSet) addNegativeRanges(ranges []singleRange) {
|
||||
if c.anything {
|
||||
return
|
||||
}
|
||||
|
||||
var hi rune
|
||||
|
||||
// convert incoming ranges into opposites, assume they are in order
|
||||
for _, r := range ranges {
|
||||
if hi < r.first {
|
||||
c.ranges = append(c.ranges, singleRange{hi, r.first - 1})
|
||||
}
|
||||
hi = r.last + 1
|
||||
}
|
||||
|
||||
if hi < utf8.MaxRune {
|
||||
c.ranges = append(c.ranges, singleRange{hi, utf8.MaxRune})
|
||||
}
|
||||
|
||||
c.canonicalize()
|
||||
}
|
||||
|
||||
func isValidUnicodeCat(catName string) bool {
|
||||
_, ok := unicodeCategories[catName]
|
||||
return ok
|
||||
@@ -515,6 +538,53 @@ func (c *CharSet) addRange(chMin, chMax rune) {
|
||||
c.canonicalize()
|
||||
}
|
||||
|
||||
func (c *CharSet) addNamedASCII(name string, negate bool) bool {
|
||||
var rs []singleRange
|
||||
|
||||
switch name {
|
||||
case "alnum":
|
||||
rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'Z'}, singleRange{'a', 'z'}}
|
||||
case "alpha":
|
||||
rs = []singleRange{singleRange{'A', 'Z'}, singleRange{'a', 'z'}}
|
||||
case "ascii":
|
||||
rs = []singleRange{singleRange{0, 0x7f}}
|
||||
case "blank":
|
||||
rs = []singleRange{singleRange{'\t', '\t'}, singleRange{' ', ' '}}
|
||||
case "cntrl":
|
||||
rs = []singleRange{singleRange{0, 0x1f}, singleRange{0x7f, 0x7f}}
|
||||
case "digit":
|
||||
c.addDigit(false, negate, "")
|
||||
case "graph":
|
||||
rs = []singleRange{singleRange{'!', '~'}}
|
||||
case "lower":
|
||||
rs = []singleRange{singleRange{'a', 'z'}}
|
||||
case "print":
|
||||
rs = []singleRange{singleRange{' ', '~'}}
|
||||
case "punct": //[!-/:-@[-`{-~]
|
||||
rs = []singleRange{singleRange{'!', '/'}, singleRange{':', '@'}, singleRange{'[', '`'}, singleRange{'{', '~'}}
|
||||
case "space":
|
||||
c.addSpace(true, negate)
|
||||
case "upper":
|
||||
rs = []singleRange{singleRange{'A', 'Z'}}
|
||||
case "word":
|
||||
c.addWord(true, negate)
|
||||
case "xdigit":
|
||||
rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'F'}, singleRange{'a', 'f'}}
|
||||
default:
|
||||
return false
|
||||
}
|
||||
|
||||
if len(rs) > 0 {
|
||||
if negate {
|
||||
c.addNegativeRanges(rs)
|
||||
} else {
|
||||
c.addRanges(rs)
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
type singleRangeSorter []singleRange
|
||||
|
||||
func (p singleRangeSorter) Len() int { return len(p) }
|
||||
|
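Note: addNegativeRanges above inserts the complement of a sorted, non-overlapping list of ranges (used when a named class is negated). Below is a standalone sketch of that complement computation over rune ranges, under the same ordering assumption; it is an illustration, not code from the vendored package.

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

// rng is a closed rune range [first, last].
type rng struct{ first, last rune }

// complement returns the ranges not covered by in, assuming in is
// sorted by first and non-overlapping (the same assumption the
// addNegativeRanges hunk above makes).
func complement(in []rng) []rng {
	var out []rng
	var hi rune // first rune not yet claimed by the complement
	for _, r := range in {
		if hi < r.first {
			out = append(out, rng{hi, r.first - 1})
		}
		hi = r.last + 1
	}
	if hi < utf8.MaxRune {
		out = append(out, rng{hi, utf8.MaxRune})
	}
	return out
}

func main() {
	digits := []rng{{'0', '9'}}
	for _, r := range complement(digits) {
		fmt.Printf("%#U..%#U\n", r.first, r.last)
	}
	// Prints the two ranges on either side of '0'-'9'.
}
```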
97
vendor/github.com/dlclark/regexp2/syntax/parser.go
generated
vendored
@@ -21,6 +21,7 @@ const (
|
||||
RightToLeft = 0x0040 // "r"
|
||||
Debug = 0x0080 // "d"
|
||||
ECMAScript = 0x0100 // "e"
|
||||
RE2 = 0x0200 // RE2 compat mode
|
||||
)
|
||||
|
||||
func optionFromCode(ch rune) RegexOptions {
|
||||
@@ -310,7 +311,7 @@ func (p *parser) countCaptures() error {
|
||||
switch ch {
|
||||
case '\\':
|
||||
if p.charsRight() > 0 {
|
||||
p.moveRight(1)
|
||||
p.scanBackslash(true)
|
||||
}
|
||||
|
||||
case '#':
|
||||
@@ -354,6 +355,14 @@ func (p *parser) countCaptures() error {
|
||||
p.noteCaptureName(p.scanCapname(), pos)
|
||||
}
|
||||
}
|
||||
} else if p.useRE2() && p.charsRight() > 2 && (p.rightChar(0) == 'P' && p.rightChar(1) == '<') {
|
||||
// RE2-compat (?P<)
|
||||
p.moveRight(2)
|
||||
ch = p.rightChar(0)
|
||||
if IsWordChar(ch) {
|
||||
p.noteCaptureName(p.scanCapname(), pos)
|
||||
}
|
||||
|
||||
} else {
|
||||
// (?...
|
||||
|
||||
@@ -520,7 +529,7 @@ func (p *parser) scanRegex() (*regexNode, error) {
|
||||
}
|
||||
|
||||
case '\\':
|
||||
n, err := p.scanBackslash()
|
||||
n, err := p.scanBackslash(false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1022,6 +1031,50 @@ func (p *parser) scanGroupOpen() (*regexNode, error) {
|
||||
}
|
||||
}
|
||||
|
||||
case 'P':
|
||||
if p.useRE2() {
|
||||
// support for P<name> syntax
|
||||
if p.charsRight() < 3 {
|
||||
goto BreakRecognize
|
||||
}
|
||||
|
||||
ch = p.moveRightGetChar()
|
||||
if ch != '<' {
|
||||
goto BreakRecognize
|
||||
}
|
||||
|
||||
ch = p.moveRightGetChar()
|
||||
p.moveLeft()
|
||||
|
||||
if IsWordChar(ch) {
|
||||
capnum := -1
|
||||
capname := p.scanCapname()
|
||||
|
||||
if p.isCaptureName(capname) {
|
||||
capnum = p.captureSlotFromName(capname)
|
||||
}
|
||||
|
||||
// check if we have bogus character after the name
|
||||
if p.charsRight() > 0 && p.rightChar(0) != '>' {
|
||||
return nil, p.getErr(ErrInvalidGroupName)
|
||||
}
|
||||
|
||||
// actually make the node
|
||||
|
||||
if capnum != -1 && p.charsRight() > 0 && p.moveRightGetChar() == '>' {
|
||||
return newRegexNodeMN(ntCapture, p.options, capnum, -1), nil
|
||||
}
|
||||
goto BreakRecognize
|
||||
|
||||
} else {
|
||||
// bad group name - starts with something other than a word character and isn't a number
|
||||
return nil, p.getErr(ErrInvalidGroupName)
|
||||
}
|
||||
}
|
||||
// if we're not using RE2 compat mode then
|
||||
// we just behave like normal
|
||||
fallthrough
|
||||
|
||||
default:
|
||||
p.moveLeft()
|
||||
|
||||
@@ -1055,7 +1108,7 @@ BreakRecognize:
|
||||
}
|
||||
|
||||
// scans backslash specials and basics
|
||||
func (p *parser) scanBackslash() (*regexNode, error) {
|
||||
func (p *parser) scanBackslash(scanOnly bool) (*regexNode, error) {
|
||||
|
||||
if p.charsRight() == 0 {
|
||||
return nil, p.getErr(ErrIllegalEndEscape)
|
||||
@@ -1123,12 +1176,12 @@ func (p *parser) scanBackslash() (*regexNode, error) {
|
||||
return newRegexNodeSet(ntSet, p.options, cc), nil
|
||||
|
||||
default:
|
||||
return p.scanBasicBackslash()
|
||||
return p.scanBasicBackslash(scanOnly)
|
||||
}
|
||||
}
|
||||
|
||||
// Scans \-style backreferences and character escapes
|
||||
func (p *parser) scanBasicBackslash() (*regexNode, error) {
|
||||
func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
|
||||
if p.charsRight() == 0 {
|
||||
return nil, p.getErr(ErrIllegalEndEscape)
|
||||
}
|
||||
@@ -1184,15 +1237,19 @@ func (p *parser) scanBasicBackslash() (*regexNode, error) {
|
||||
if p.charsRight() > 0 && p.moveRightGetChar() == close {
|
||||
if p.isCaptureSlot(capnum) {
|
||||
return newRegexNodeM(ntRef, p.options, capnum), nil
|
||||
} else {
|
||||
return nil, p.getErr(ErrUndefinedBackRef, capnum)
|
||||
}
|
||||
return nil, p.getErr(ErrUndefinedBackRef, capnum)
|
||||
}
|
||||
} else if !angled && ch >= '1' && ch <= '9' { // Try to parse backreference or octal: \1
|
||||
capnum, err := p.scanDecimal()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if scanOnly {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
if p.useOptionE() || p.isCaptureSlot(capnum) {
|
||||
return newRegexNodeM(ntRef, p.options, capnum), nil
|
||||
}
|
||||
@@ -1448,11 +1505,26 @@ func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) {
|
||||
savePos := p.textpos()
|
||||
|
||||
p.moveRight(1)
|
||||
p.scanCapname() // throwaway the name
|
||||
negate := false
|
||||
if p.charsRight() > 1 && p.rightChar(0) == '^' {
|
||||
negate = true
|
||||
p.moveRight(1)
|
||||
}
|
||||
|
||||
nm := p.scanCapname() // snag the name
|
||||
if !scanOnly && p.useRE2() {
|
||||
// look up the name since these are valid for RE2
|
||||
// add the group based on the name
|
||||
if ok := cc.addNamedASCII(nm, negate); !ok {
|
||||
return nil, p.getErr(ErrInvalidCharRange)
|
||||
}
|
||||
}
|
||||
if p.charsRight() < 2 || p.moveRightGetChar() != ':' || p.moveRightGetChar() != ']' {
|
||||
p.textto(savePos)
|
||||
} else if p.useRE2() {
|
||||
// move on
|
||||
continue
|
||||
}
|
||||
// else lookup name (nyi)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1547,7 +1619,7 @@ func (p *parser) scanDecimal() (int, error) {
|
||||
|
||||
// Returns true for options allowed only at the top level
|
||||
func isOnlyTopOption(option RegexOptions) bool {
|
||||
return option == RightToLeft || option == ECMAScript
|
||||
return option == RightToLeft || option == ECMAScript || option == RE2
|
||||
}
|
||||
|
||||
// Scans cimsx-cimsx option string, stops at the first unrecognized char.
|
||||
@@ -1861,6 +1933,11 @@ func (p *parser) useOptionE() bool {
|
||||
return (p.options & ECMAScript) != 0
|
||||
}
|
||||
|
||||
// true to use RE2 compatibility parsing behavior.
|
||||
func (p *parser) useRE2() bool {
|
||||
return (p.options & RE2) != 0
|
||||
}
|
||||
|
||||
// True if options stack is empty.
|
||||
func (p *parser) emptyOptionsStack() bool {
|
||||
return len(p.optionsStack) == 0
|
||||
|
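Note: the scanCharSet hunk above also accepts a leading ^ inside [[: :]], so negated named ASCII classes become usable in RE2 mode. Below is a hedged example, assuming the regexp2 API and that the pattern is parsed by the new code as described; it is not code from this repository.

```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	// [[:^digit:]] matches any rune outside '0'-'9' when RE2 mode is enabled.
	re := regexp2.MustCompile(`^[[:^digit:]]+$`, regexp2.RE2)

	for _, s := range []string{"cheat", "1337"} {
		ok, _ := re.MatchString(s)
		fmt.Println(s, ok) // cheat true, then 1337 false
	}
}
```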
9
vendor/github.com/mattn/go-colorable/.travis.yml
generated
vendored
@@ -1,9 +0,0 @@
|
||||
language: go
|
||||
go:
|
||||
- tip
|
||||
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
- go get golang.org/x/tools/cmd/cover
|
||||
script:
|
||||
- $HOME/gopath/bin/goveralls -repotoken xnXqRGwgW3SXIguzxf90ZSK1GPYZPaGrw
|
21
vendor/github.com/mattn/go-colorable/LICENSE
generated
vendored
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Yasuhiro Matsumoto
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
48
vendor/github.com/mattn/go-colorable/README.md
generated
vendored
@@ -1,48 +0,0 @@
|
||||
# go-colorable
|
||||
|
||||
[](http://godoc.org/github.com/mattn/go-colorable)
|
||||
[](https://travis-ci.org/mattn/go-colorable)
|
||||
[](https://coveralls.io/github/mattn/go-colorable?branch=master)
|
||||
[](https://goreportcard.com/report/mattn/go-colorable)
|
||||
|
||||
Colorable writer for windows.
|
||||
|
||||
For example, most of logger packages doesn't show colors on windows. (I know we can do it with ansicon. But I don't want.)
|
||||
This package is possible to handle escape sequence for ansi color on windows.
|
||||
|
||||
## Too Bad!
|
||||
|
||||

|
||||
|
||||
|
||||
## So Good!
|
||||
|
||||

|
||||
|
||||
## Usage
|
||||
|
||||
```go
|
||||
logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true})
|
||||
logrus.SetOutput(colorable.NewColorableStdout())
|
||||
|
||||
logrus.Info("succeeded")
|
||||
logrus.Warn("not correct")
|
||||
logrus.Error("something error")
|
||||
logrus.Fatal("panic")
|
||||
```
|
||||
|
||||
You can compile above code on non-windows OSs.
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
$ go get github.com/mattn/go-colorable
|
||||
```
|
||||
|
||||
# License
|
||||
|
||||
MIT
|
||||
|
||||
# Author
|
||||
|
||||
Yasuhiro Matsumoto (a.k.a mattn)
|
29
vendor/github.com/mattn/go-colorable/colorable_appengine.go
generated
vendored
@@ -1,29 +0,0 @@
|
||||
// +build appengine
|
||||
|
||||
package colorable
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
|
||||
_ "github.com/mattn/go-isatty"
|
||||
)
|
||||
|
||||
// NewColorable return new instance of Writer which handle escape sequence.
|
||||
func NewColorable(file *os.File) io.Writer {
|
||||
if file == nil {
|
||||
panic("nil passed instead of *os.File to NewColorable()")
|
||||
}
|
||||
|
||||
return file
|
||||
}
|
||||
|
||||
// NewColorableStdout return new instance of Writer which handle escape sequence for stdout.
|
||||
func NewColorableStdout() io.Writer {
|
||||
return os.Stdout
|
||||
}
|
||||
|
||||
// NewColorableStderr return new instance of Writer which handle escape sequence for stderr.
|
||||
func NewColorableStderr() io.Writer {
|
||||
return os.Stderr
|
||||
}
|
30
vendor/github.com/mattn/go-colorable/colorable_others.go
generated
vendored
@@ -1,30 +0,0 @@
|
||||
// +build !windows
|
||||
// +build !appengine
|
||||
|
||||
package colorable
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
|
||||
_ "github.com/mattn/go-isatty"
|
||||
)
|
||||
|
||||
// NewColorable return new instance of Writer which handle escape sequence.
|
||||
func NewColorable(file *os.File) io.Writer {
|
||||
if file == nil {
|
||||
panic("nil passed instead of *os.File to NewColorable()")
|
||||
}
|
||||
|
||||
return file
|
||||
}
|
||||
|
||||
// NewColorableStdout return new instance of Writer which handle escape sequence for stdout.
|
||||
func NewColorableStdout() io.Writer {
|
||||
return os.Stdout
|
||||
}
|
||||
|
||||
// NewColorableStderr return new instance of Writer which handle escape sequence for stderr.
|
||||
func NewColorableStderr() io.Writer {
|
||||
return os.Stderr
|
||||
}
|
884
vendor/github.com/mattn/go-colorable/colorable_windows.go
generated
vendored
@@ -1,884 +0,0 @@
|
||||
// +build windows
|
||||
// +build !appengine
|
||||
|
||||
package colorable
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"math"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
"github.com/mattn/go-isatty"
|
||||
)
|
||||
|
||||
const (
|
||||
foregroundBlue = 0x1
|
||||
foregroundGreen = 0x2
|
||||
foregroundRed = 0x4
|
||||
foregroundIntensity = 0x8
|
||||
foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity)
|
||||
backgroundBlue = 0x10
|
||||
backgroundGreen = 0x20
|
||||
backgroundRed = 0x40
|
||||
backgroundIntensity = 0x80
|
||||
backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity)
|
||||
)
|
||||
|
||||
type wchar uint16
|
||||
type short int16
|
||||
type dword uint32
|
||||
type word uint16
|
||||
|
||||
type coord struct {
|
||||
x short
|
||||
y short
|
||||
}
|
||||
|
||||
type smallRect struct {
|
||||
left short
|
||||
top short
|
||||
right short
|
||||
bottom short
|
||||
}
|
||||
|
||||
type consoleScreenBufferInfo struct {
|
||||
size coord
|
||||
cursorPosition coord
|
||||
attributes word
|
||||
window smallRect
|
||||
maximumWindowSize coord
|
||||
}
|
||||
|
||||
type consoleCursorInfo struct {
|
||||
size dword
|
||||
visible int32
|
||||
}
|
||||
|
||||
var (
|
||||
kernel32 = syscall.NewLazyDLL("kernel32.dll")
|
||||
procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
|
||||
procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
|
||||
procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition")
|
||||
procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW")
|
||||
procFillConsoleOutputAttribute = kernel32.NewProc("FillConsoleOutputAttribute")
|
||||
procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo")
|
||||
procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo")
|
||||
procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW")
|
||||
)
|
||||
|
||||
// Writer provide colorable Writer to the console
|
||||
type Writer struct {
|
||||
out io.Writer
|
||||
handle syscall.Handle
|
||||
oldattr word
|
||||
oldpos coord
|
||||
}
|
||||
|
||||
// NewColorable return new instance of Writer which handle escape sequence from File.
|
||||
func NewColorable(file *os.File) io.Writer {
|
||||
if file == nil {
|
||||
panic("nil passed instead of *os.File to NewColorable()")
|
||||
}
|
||||
|
||||
if isatty.IsTerminal(file.Fd()) {
|
||||
var csbi consoleScreenBufferInfo
|
||||
handle := syscall.Handle(file.Fd())
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}}
|
||||
}
|
||||
return file
|
||||
}
|
||||
|
||||
// NewColorableStdout return new instance of Writer which handle escape sequence for stdout.
|
||||
func NewColorableStdout() io.Writer {
|
||||
return NewColorable(os.Stdout)
|
||||
}
|
||||
|
||||
// NewColorableStderr return new instance of Writer which handle escape sequence for stderr.
|
||||
func NewColorableStderr() io.Writer {
|
||||
return NewColorable(os.Stderr)
|
||||
}
|
||||
|
||||
var color256 = map[int]int{
|
||||
0: 0x000000,
|
||||
1: 0x800000,
|
||||
2: 0x008000,
|
||||
3: 0x808000,
|
||||
4: 0x000080,
|
||||
5: 0x800080,
|
||||
6: 0x008080,
|
||||
7: 0xc0c0c0,
|
||||
8: 0x808080,
|
||||
9: 0xff0000,
|
||||
10: 0x00ff00,
|
||||
11: 0xffff00,
|
||||
12: 0x0000ff,
|
||||
13: 0xff00ff,
|
||||
14: 0x00ffff,
|
||||
15: 0xffffff,
|
||||
16: 0x000000,
|
||||
17: 0x00005f,
|
||||
18: 0x000087,
|
||||
19: 0x0000af,
|
||||
20: 0x0000d7,
|
||||
21: 0x0000ff,
|
||||
22: 0x005f00,
|
||||
23: 0x005f5f,
|
||||
24: 0x005f87,
|
||||
25: 0x005faf,
|
||||
26: 0x005fd7,
|
||||
27: 0x005fff,
|
||||
28: 0x008700,
|
||||
29: 0x00875f,
|
||||
30: 0x008787,
|
||||
31: 0x0087af,
|
||||
32: 0x0087d7,
|
||||
33: 0x0087ff,
|
||||
34: 0x00af00,
|
||||
35: 0x00af5f,
|
||||
36: 0x00af87,
|
||||
37: 0x00afaf,
|
||||
38: 0x00afd7,
|
||||
39: 0x00afff,
|
||||
40: 0x00d700,
|
||||
41: 0x00d75f,
|
||||
42: 0x00d787,
|
||||
43: 0x00d7af,
|
||||
44: 0x00d7d7,
|
||||
45: 0x00d7ff,
|
||||
46: 0x00ff00,
|
||||
47: 0x00ff5f,
|
||||
48: 0x00ff87,
|
||||
49: 0x00ffaf,
|
||||
50: 0x00ffd7,
|
||||
51: 0x00ffff,
|
||||
52: 0x5f0000,
|
||||
53: 0x5f005f,
|
||||
54: 0x5f0087,
|
||||
55: 0x5f00af,
|
||||
56: 0x5f00d7,
|
||||
57: 0x5f00ff,
|
||||
58: 0x5f5f00,
|
||||
59: 0x5f5f5f,
|
||||
60: 0x5f5f87,
|
||||
61: 0x5f5faf,
|
||||
62: 0x5f5fd7,
|
||||
63: 0x5f5fff,
|
||||
64: 0x5f8700,
|
||||
65: 0x5f875f,
|
||||
66: 0x5f8787,
|
||||
67: 0x5f87af,
|
||||
68: 0x5f87d7,
|
||||
69: 0x5f87ff,
|
||||
70: 0x5faf00,
|
||||
71: 0x5faf5f,
|
||||
72: 0x5faf87,
|
||||
73: 0x5fafaf,
|
||||
74: 0x5fafd7,
|
||||
75: 0x5fafff,
|
||||
76: 0x5fd700,
|
||||
77: 0x5fd75f,
|
||||
78: 0x5fd787,
|
||||
79: 0x5fd7af,
|
||||
80: 0x5fd7d7,
|
||||
81: 0x5fd7ff,
|
||||
82: 0x5fff00,
|
||||
83: 0x5fff5f,
|
||||
84: 0x5fff87,
|
||||
85: 0x5fffaf,
|
||||
86: 0x5fffd7,
|
||||
87: 0x5fffff,
|
||||
88: 0x870000,
|
||||
89: 0x87005f,
|
||||
90: 0x870087,
|
||||
91: 0x8700af,
|
||||
92: 0x8700d7,
|
||||
93: 0x8700ff,
|
||||
94: 0x875f00,
|
||||
95: 0x875f5f,
|
||||
96: 0x875f87,
|
||||
97: 0x875faf,
|
||||
98: 0x875fd7,
|
||||
99: 0x875fff,
|
||||
100: 0x878700,
|
||||
101: 0x87875f,
|
||||
102: 0x878787,
|
||||
103: 0x8787af,
|
||||
104: 0x8787d7,
|
||||
105: 0x8787ff,
|
||||
106: 0x87af00,
|
||||
107: 0x87af5f,
|
||||
108: 0x87af87,
|
||||
109: 0x87afaf,
|
||||
110: 0x87afd7,
|
||||
111: 0x87afff,
|
||||
112: 0x87d700,
|
||||
113: 0x87d75f,
|
||||
114: 0x87d787,
|
||||
115: 0x87d7af,
|
||||
116: 0x87d7d7,
|
||||
117: 0x87d7ff,
|
||||
118: 0x87ff00,
|
||||
119: 0x87ff5f,
|
||||
120: 0x87ff87,
|
||||
121: 0x87ffaf,
|
||||
122: 0x87ffd7,
|
||||
123: 0x87ffff,
|
||||
124: 0xaf0000,
|
||||
125: 0xaf005f,
|
||||
126: 0xaf0087,
|
||||
127: 0xaf00af,
|
||||
128: 0xaf00d7,
|
||||
129: 0xaf00ff,
|
||||
130: 0xaf5f00,
|
||||
131: 0xaf5f5f,
|
||||
132: 0xaf5f87,
|
||||
133: 0xaf5faf,
|
||||
134: 0xaf5fd7,
|
||||
135: 0xaf5fff,
|
||||
136: 0xaf8700,
|
||||
137: 0xaf875f,
|
||||
138: 0xaf8787,
|
||||
139: 0xaf87af,
|
||||
140: 0xaf87d7,
|
||||
141: 0xaf87ff,
|
||||
142: 0xafaf00,
|
||||
143: 0xafaf5f,
|
||||
144: 0xafaf87,
|
||||
145: 0xafafaf,
|
||||
146: 0xafafd7,
|
||||
147: 0xafafff,
|
||||
148: 0xafd700,
|
||||
149: 0xafd75f,
|
||||
150: 0xafd787,
|
||||
151: 0xafd7af,
|
||||
152: 0xafd7d7,
|
||||
153: 0xafd7ff,
|
||||
154: 0xafff00,
|
||||
155: 0xafff5f,
|
||||
156: 0xafff87,
|
||||
157: 0xafffaf,
|
||||
158: 0xafffd7,
|
||||
159: 0xafffff,
|
||||
160: 0xd70000,
|
||||
161: 0xd7005f,
|
||||
162: 0xd70087,
|
||||
163: 0xd700af,
|
||||
164: 0xd700d7,
|
||||
165: 0xd700ff,
|
||||
166: 0xd75f00,
|
||||
167: 0xd75f5f,
|
||||
168: 0xd75f87,
|
||||
169: 0xd75faf,
|
||||
170: 0xd75fd7,
|
||||
171: 0xd75fff,
|
||||
172: 0xd78700,
|
||||
173: 0xd7875f,
|
||||
174: 0xd78787,
|
||||
175: 0xd787af,
|
||||
176: 0xd787d7,
|
||||
177: 0xd787ff,
|
||||
178: 0xd7af00,
|
||||
179: 0xd7af5f,
|
||||
180: 0xd7af87,
|
||||
181: 0xd7afaf,
|
||||
182: 0xd7afd7,
|
||||
183: 0xd7afff,
|
||||
184: 0xd7d700,
|
||||
185: 0xd7d75f,
|
||||
186: 0xd7d787,
|
||||
187: 0xd7d7af,
|
||||
188: 0xd7d7d7,
|
||||
189: 0xd7d7ff,
|
||||
190: 0xd7ff00,
|
||||
191: 0xd7ff5f,
|
||||
192: 0xd7ff87,
|
||||
193: 0xd7ffaf,
|
||||
194: 0xd7ffd7,
|
||||
195: 0xd7ffff,
|
||||
196: 0xff0000,
|
||||
197: 0xff005f,
|
||||
198: 0xff0087,
|
||||
199: 0xff00af,
|
||||
200: 0xff00d7,
|
||||
201: 0xff00ff,
|
||||
202: 0xff5f00,
|
||||
203: 0xff5f5f,
|
||||
204: 0xff5f87,
|
||||
205: 0xff5faf,
|
||||
206: 0xff5fd7,
|
||||
207: 0xff5fff,
|
||||
208: 0xff8700,
|
||||
209: 0xff875f,
|
||||
210: 0xff8787,
|
||||
211: 0xff87af,
|
||||
212: 0xff87d7,
|
||||
213: 0xff87ff,
|
||||
214: 0xffaf00,
|
||||
215: 0xffaf5f,
|
||||
216: 0xffaf87,
|
||||
217: 0xffafaf,
|
||||
218: 0xffafd7,
|
||||
219: 0xffafff,
|
||||
220: 0xffd700,
|
||||
221: 0xffd75f,
|
||||
222: 0xffd787,
|
||||
223: 0xffd7af,
|
||||
224: 0xffd7d7,
|
||||
225: 0xffd7ff,
|
||||
226: 0xffff00,
|
||||
227: 0xffff5f,
|
||||
228: 0xffff87,
|
||||
229: 0xffffaf,
|
||||
230: 0xffffd7,
|
||||
231: 0xffffff,
|
||||
232: 0x080808,
|
||||
233: 0x121212,
|
||||
234: 0x1c1c1c,
|
||||
235: 0x262626,
|
||||
236: 0x303030,
|
||||
237: 0x3a3a3a,
|
||||
238: 0x444444,
|
||||
239: 0x4e4e4e,
|
||||
240: 0x585858,
|
||||
241: 0x626262,
|
||||
242: 0x6c6c6c,
|
||||
243: 0x767676,
|
||||
244: 0x808080,
|
||||
245: 0x8a8a8a,
|
||||
246: 0x949494,
|
||||
247: 0x9e9e9e,
|
||||
248: 0xa8a8a8,
|
||||
249: 0xb2b2b2,
|
||||
250: 0xbcbcbc,
|
||||
251: 0xc6c6c6,
|
||||
252: 0xd0d0d0,
|
||||
253: 0xdadada,
|
||||
254: 0xe4e4e4,
|
||||
255: 0xeeeeee,
|
||||
}
|
||||
|
||||
// `\033]0;TITLESTR\007`
|
||||
func doTitleSequence(er *bytes.Reader) error {
|
||||
var c byte
|
||||
var err error
|
||||
|
||||
c, err = er.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c != '0' && c != '2' {
|
||||
return nil
|
||||
}
|
||||
c, err = er.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c != ';' {
|
||||
return nil
|
||||
}
|
||||
title := make([]byte, 0, 80)
|
||||
for {
|
||||
c, err = er.ReadByte()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c == 0x07 || c == '\n' {
|
||||
break
|
||||
}
|
||||
title = append(title, c)
|
||||
}
|
||||
if len(title) > 0 {
|
||||
title8, err := syscall.UTF16PtrFromString(string(title))
|
||||
if err == nil {
|
||||
procSetConsoleTitle.Call(uintptr(unsafe.Pointer(title8)))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Write write data on console
|
||||
func (w *Writer) Write(data []byte) (n int, err error) {
|
||||
var csbi consoleScreenBufferInfo
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
|
||||
er := bytes.NewReader(data)
|
||||
var bw [1]byte
|
||||
loop:
|
||||
for {
|
||||
c1, err := er.ReadByte()
|
||||
if err != nil {
|
||||
break loop
|
||||
}
|
||||
if c1 != 0x1b {
|
||||
bw[0] = c1
|
||||
w.out.Write(bw[:])
|
||||
continue
|
||||
}
|
||||
c2, err := er.ReadByte()
|
||||
if err != nil {
|
||||
break loop
|
||||
}
|
||||
|
||||
if c2 == ']' {
|
||||
if err := doTitleSequence(er); err != nil {
|
||||
break loop
|
||||
}
|
||||
continue
|
||||
}
|
||||
if c2 != 0x5b {
|
||||
continue
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
var m byte
|
||||
for {
|
||||
c, err := er.ReadByte()
|
||||
if err != nil {
|
||||
break loop
|
||||
}
|
||||
if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
|
||||
m = c
|
||||
break
|
||||
}
|
||||
buf.Write([]byte(string(c)))
|
||||
}
|
||||
|
||||
switch m {
|
||||
case 'A':
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
csbi.cursorPosition.y -= short(n)
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'B':
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
csbi.cursorPosition.y += short(n)
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'C':
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
csbi.cursorPosition.x += short(n)
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'D':
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
csbi.cursorPosition.x -= short(n)
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'E':
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
csbi.cursorPosition.x = 0
|
||||
csbi.cursorPosition.y += short(n)
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'F':
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
csbi.cursorPosition.x = 0
|
||||
csbi.cursorPosition.y -= short(n)
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'G':
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
csbi.cursorPosition.x = short(n - 1)
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'H', 'f':
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
if buf.Len() > 0 {
|
||||
token := strings.Split(buf.String(), ";")
|
||||
switch len(token) {
|
||||
case 1:
|
||||
n1, err := strconv.Atoi(token[0])
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
csbi.cursorPosition.y = short(n1 - 1)
|
||||
case 2:
|
||||
n1, err := strconv.Atoi(token[0])
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
n2, err := strconv.Atoi(token[1])
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
csbi.cursorPosition.x = short(n2 - 1)
|
||||
csbi.cursorPosition.y = short(n1 - 1)
|
||||
}
|
||||
} else {
|
||||
csbi.cursorPosition.y = 0
|
||||
}
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
|
||||
case 'J':
|
||||
n := 0
|
||||
if buf.Len() > 0 {
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
}
|
||||
var count, written dword
|
||||
var cursor coord
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
switch n {
|
||||
case 0:
|
||||
cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
|
||||
count = dword(csbi.size.x - csbi.cursorPosition.x + (csbi.size.y-csbi.cursorPosition.y)*csbi.size.x)
|
||||
case 1:
|
||||
cursor = coord{x: csbi.window.left, y: csbi.window.top}
|
||||
count = dword(csbi.size.x - csbi.cursorPosition.x + (csbi.window.top-csbi.cursorPosition.y)*csbi.size.x)
|
||||
case 2:
|
||||
cursor = coord{x: csbi.window.left, y: csbi.window.top}
|
||||
count = dword(csbi.size.x - csbi.cursorPosition.x + (csbi.size.y-csbi.cursorPosition.y)*csbi.size.x)
|
||||
}
|
||||
procFillConsoleOutputCharacter.Call(uintptr(w.handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
|
||||
procFillConsoleOutputAttribute.Call(uintptr(w.handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
|
||||
case 'K':
|
||||
n := 0
|
||||
if buf.Len() > 0 {
|
||||
n, err = strconv.Atoi(buf.String())
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
}
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
var cursor coord
|
||||
var count, written dword
|
||||
switch n {
|
||||
case 0:
|
||||
cursor = coord{x: csbi.cursorPosition.x + 1, y: csbi.cursorPosition.y}
|
||||
count = dword(csbi.size.x - csbi.cursorPosition.x - 1)
|
||||
case 1:
|
||||
cursor = coord{x: csbi.window.left, y: csbi.window.top + csbi.cursorPosition.y}
|
||||
count = dword(csbi.size.x - csbi.cursorPosition.x)
|
||||
case 2:
|
||||
cursor = coord{x: csbi.window.left, y: csbi.window.top + csbi.cursorPosition.y}
|
||||
count = dword(csbi.size.x)
|
||||
}
|
||||
procFillConsoleOutputCharacter.Call(uintptr(w.handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
|
||||
procFillConsoleOutputAttribute.Call(uintptr(w.handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
|
||||
case 'm':
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
attr := csbi.attributes
|
||||
cs := buf.String()
|
||||
if cs == "" {
|
||||
procSetConsoleTextAttribute.Call(uintptr(w.handle), uintptr(w.oldattr))
|
||||
continue
|
||||
}
|
||||
token := strings.Split(cs, ";")
|
||||
for i := 0; i < len(token); i++ {
|
||||
ns := token[i]
|
||||
if n, err = strconv.Atoi(ns); err == nil {
|
||||
switch {
|
||||
case n == 0 || n == 100:
|
||||
attr = w.oldattr
|
||||
case 1 <= n && n <= 5:
|
||||
attr |= foregroundIntensity
|
||||
case n == 7:
|
||||
attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4)
|
||||
case n == 22 || n == 25:
|
||||
attr |= foregroundIntensity
|
||||
case n == 27:
|
||||
attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4)
|
||||
case 30 <= n && n <= 37:
|
||||
attr &= backgroundMask
|
||||
if (n-30)&1 != 0 {
|
||||
attr |= foregroundRed
|
||||
}
|
||||
if (n-30)&2 != 0 {
|
||||
attr |= foregroundGreen
|
||||
}
|
||||
if (n-30)&4 != 0 {
|
||||
attr |= foregroundBlue
|
||||
}
|
||||
case n == 38: // set foreground color.
|
||||
if i < len(token)-2 && (token[i+1] == "5" || token[i+1] == "05") {
|
||||
if n256, err := strconv.Atoi(token[i+2]); err == nil {
|
||||
if n256foreAttr == nil {
|
||||
n256setup()
|
||||
}
|
||||
attr &= backgroundMask
|
||||
attr |= n256foreAttr[n256]
|
||||
i += 2
|
||||
}
|
||||
} else {
|
||||
attr = attr & (w.oldattr & backgroundMask)
|
||||
}
|
||||
case n == 39: // reset foreground color.
|
||||
attr &= backgroundMask
|
||||
attr |= w.oldattr & foregroundMask
|
||||
case 40 <= n && n <= 47:
|
||||
attr &= foregroundMask
|
||||
if (n-40)&1 != 0 {
|
||||
attr |= backgroundRed
|
||||
}
|
||||
if (n-40)&2 != 0 {
|
||||
attr |= backgroundGreen
|
||||
}
|
||||
if (n-40)&4 != 0 {
|
||||
attr |= backgroundBlue
|
||||
}
|
||||
case n == 48: // set background color.
|
||||
if i < len(token)-2 && token[i+1] == "5" {
|
||||
if n256, err := strconv.Atoi(token[i+2]); err == nil {
|
||||
if n256backAttr == nil {
|
||||
n256setup()
|
||||
}
|
||||
attr &= foregroundMask
|
||||
attr |= n256backAttr[n256]
|
||||
i += 2
|
||||
}
|
||||
} else {
|
||||
attr = attr & (w.oldattr & foregroundMask)
|
||||
}
|
||||
case n == 49: // reset foreground color.
|
||||
attr &= foregroundMask
|
||||
attr |= w.oldattr & backgroundMask
|
||||
case 90 <= n && n <= 97:
|
||||
attr = (attr & backgroundMask)
|
||||
attr |= foregroundIntensity
|
||||
if (n-90)&1 != 0 {
|
||||
attr |= foregroundRed
|
||||
}
|
||||
if (n-90)&2 != 0 {
|
||||
attr |= foregroundGreen
|
||||
}
|
||||
if (n-90)&4 != 0 {
|
||||
attr |= foregroundBlue
|
||||
}
|
||||
case 100 <= n && n <= 107:
|
||||
attr = (attr & foregroundMask)
|
||||
attr |= backgroundIntensity
|
||||
if (n-100)&1 != 0 {
|
||||
attr |= backgroundRed
|
||||
}
|
||||
if (n-100)&2 != 0 {
|
||||
attr |= backgroundGreen
|
||||
}
|
||||
if (n-100)&4 != 0 {
|
||||
attr |= backgroundBlue
|
||||
}
|
||||
}
|
||||
procSetConsoleTextAttribute.Call(uintptr(w.handle), uintptr(attr))
|
||||
}
|
||||
}
|
||||
case 'h':
|
||||
var ci consoleCursorInfo
|
||||
cs := buf.String()
|
||||
if cs == "5>" {
|
||||
procGetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
ci.visible = 0
|
||||
procSetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
} else if cs == "?25" {
|
||||
procGetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
ci.visible = 1
|
||||
procSetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
}
|
||||
case 'l':
|
||||
var ci consoleCursorInfo
|
||||
cs := buf.String()
|
||||
if cs == "5>" {
|
||||
procGetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
ci.visible = 1
|
||||
procSetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
} else if cs == "?25" {
|
||||
procGetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
ci.visible = 0
|
||||
procSetConsoleCursorInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&ci)))
|
||||
}
|
||||
case 's':
|
||||
procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
|
||||
w.oldpos = csbi.cursorPosition
|
||||
case 'u':
|
||||
procSetConsoleCursorPosition.Call(uintptr(w.handle), *(*uintptr)(unsafe.Pointer(&w.oldpos)))
|
||||
}
|
||||
}
|
||||
|
||||
return len(data), nil
|
||||
}
|
||||
|
||||
type consoleColor struct {
|
||||
rgb int
|
||||
red bool
|
||||
green bool
|
||||
blue bool
|
||||
intensity bool
|
||||
}
|
||||
|
||||
func (c consoleColor) foregroundAttr() (attr word) {
|
||||
if c.red {
|
||||
attr |= foregroundRed
|
||||
}
|
||||
if c.green {
|
||||
attr |= foregroundGreen
|
||||
}
|
||||
if c.blue {
|
||||
attr |= foregroundBlue
|
||||
}
|
||||
if c.intensity {
|
||||
attr |= foregroundIntensity
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (c consoleColor) backgroundAttr() (attr word) {
|
||||
if c.red {
|
||||
attr |= backgroundRed
|
||||
}
|
||||
if c.green {
|
||||
attr |= backgroundGreen
|
||||
}
|
||||
if c.blue {
|
||||
attr |= backgroundBlue
|
||||
}
|
||||
if c.intensity {
|
||||
attr |= backgroundIntensity
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
var color16 = []consoleColor{
|
||||
{0x000000, false, false, false, false},
|
||||
{0x000080, false, false, true, false},
|
||||
{0x008000, false, true, false, false},
|
||||
{0x008080, false, true, true, false},
|
||||
{0x800000, true, false, false, false},
|
||||
{0x800080, true, false, true, false},
|
||||
{0x808000, true, true, false, false},
|
||||
{0xc0c0c0, true, true, true, false},
|
||||
{0x808080, false, false, false, true},
|
||||
{0x0000ff, false, false, true, true},
|
||||
{0x00ff00, false, true, false, true},
|
||||
{0x00ffff, false, true, true, true},
|
||||
{0xff0000, true, false, false, true},
|
||||
{0xff00ff, true, false, true, true},
|
||||
{0xffff00, true, true, false, true},
|
||||
{0xffffff, true, true, true, true},
|
||||
}
|
||||
|
||||
type hsv struct {
|
||||
h, s, v float32
|
||||
}
|
||||
|
||||
func (a hsv) dist(b hsv) float32 {
|
||||
dh := a.h - b.h
|
||||
switch {
|
||||
case dh > 0.5:
|
||||
dh = 1 - dh
|
||||
case dh < -0.5:
|
||||
dh = -1 - dh
|
||||
}
|
||||
ds := a.s - b.s
|
||||
dv := a.v - b.v
|
||||
return float32(math.Sqrt(float64(dh*dh + ds*ds + dv*dv)))
|
||||
}
|
||||
|
||||
func toHSV(rgb int) hsv {
|
||||
r, g, b := float32((rgb&0xFF0000)>>16)/256.0,
|
||||
float32((rgb&0x00FF00)>>8)/256.0,
|
||||
float32(rgb&0x0000FF)/256.0
|
||||
min, max := minmax3f(r, g, b)
|
||||
h := max - min
|
||||
if h > 0 {
|
||||
if max == r {
|
||||
h = (g - b) / h
|
||||
if h < 0 {
|
||||
h += 6
|
||||
}
|
||||
} else if max == g {
|
||||
h = 2 + (b-r)/h
|
||||
} else {
|
||||
h = 4 + (r-g)/h
|
||||
}
|
||||
}
|
||||
h /= 6.0
|
||||
s := max - min
|
||||
if max != 0 {
|
||||
s /= max
|
||||
}
|
||||
v := max
|
||||
return hsv{h: h, s: s, v: v}
|
||||
}
|
||||
|
||||
type hsvTable []hsv
|
||||
|
||||
func toHSVTable(rgbTable []consoleColor) hsvTable {
|
||||
t := make(hsvTable, len(rgbTable))
|
||||
for i, c := range rgbTable {
|
||||
t[i] = toHSV(c.rgb)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
func (t hsvTable) find(rgb int) consoleColor {
|
||||
hsv := toHSV(rgb)
|
||||
n := 7
|
||||
l := float32(5.0)
|
||||
for i, p := range t {
|
||||
d := hsv.dist(p)
|
||||
if d < l {
|
||||
l, n = d, i
|
||||
}
|
||||
}
|
||||
return color16[n]
|
||||
}
|
||||
|
||||
func minmax3f(a, b, c float32) (min, max float32) {
|
||||
if a < b {
|
||||
if b < c {
|
||||
return a, c
|
||||
} else if a < c {
|
||||
return a, b
|
||||
} else {
|
||||
return c, b
|
||||
}
|
||||
} else {
|
||||
if a < c {
|
||||
return b, c
|
||||
} else if b < c {
|
||||
return b, a
|
||||
} else {
|
||||
return c, a
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var n256foreAttr []word
|
||||
var n256backAttr []word
|
||||
|
||||
func n256setup() {
|
||||
n256foreAttr = make([]word, 256)
|
||||
n256backAttr = make([]word, 256)
|
||||
t := toHSVTable(color16)
|
||||
for i, rgb := range color256 {
|
||||
c := t.find(rgb)
|
||||
n256foreAttr[i] = c.foregroundAttr()
|
||||
n256backAttr[i] = c.backgroundAttr()
|
||||
}
|
||||
}
|
55
vendor/github.com/mattn/go-colorable/noncolorable.go
generated
vendored
@@ -1,55 +0,0 @@
|
||||
package colorable
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
// NonColorable hold writer but remove escape sequence.
|
||||
type NonColorable struct {
|
||||
out io.Writer
|
||||
}
|
||||
|
||||
// NewNonColorable return new instance of Writer which remove escape sequence from Writer.
|
||||
func NewNonColorable(w io.Writer) io.Writer {
|
||||
return &NonColorable{out: w}
|
||||
}
|
||||
|
||||
// Write write data on console
|
||||
func (w *NonColorable) Write(data []byte) (n int, err error) {
|
||||
er := bytes.NewReader(data)
|
||||
var bw [1]byte
|
||||
loop:
|
||||
for {
|
||||
c1, err := er.ReadByte()
|
||||
if err != nil {
|
||||
break loop
|
||||
}
|
||||
if c1 != 0x1b {
|
||||
bw[0] = c1
|
||||
w.out.Write(bw[:])
|
||||
continue
|
||||
}
|
||||
c2, err := er.ReadByte()
|
||||
if err != nil {
|
||||
break loop
|
||||
}
|
||||
if c2 != 0x5b {
|
||||
continue
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
for {
|
||||
c, err := er.ReadByte()
|
||||
if err != nil {
|
||||
break loop
|
||||
}
|
||||
if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
|
||||
break
|
||||
}
|
||||
buf.Write([]byte(string(c)))
|
||||
}
|
||||
}
|
||||
|
||||
return len(data), nil
|
||||
}
|
15
vendor/github.com/mattn/go-isatty/.travis.yml
generated
vendored
@@ -1,13 +1,14 @@
|
||||
language: go
|
||||
sudo: false
|
||||
go:
|
||||
- 1.13.x
|
||||
- tip
|
||||
|
||||
os:
|
||||
- linux
|
||||
- osx
|
||||
|
||||
before_install:
|
||||
- go get github.com/mattn/goveralls
|
||||
- go get golang.org/x/tools/cmd/cover
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- $HOME/gopath/bin/goveralls -repotoken 3gHdORO5k5ziZcWMBxnd9LrMZaJs8m9x5
|
||||
- ./go.test.sh
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
|
2
vendor/github.com/mattn/go-isatty/README.md
generated
vendored
@@ -1,7 +1,7 @@
|
||||
# go-isatty
|
||||
|
||||
[](http://godoc.org/github.com/mattn/go-isatty)
|
||||
[](https://travis-ci.org/mattn/go-isatty)
|
||||
[](https://codecov.io/gh/mattn/go-isatty)
|
||||
[](https://coveralls.io/github/mattn/go-isatty?branch=master)
|
||||
[](https://goreportcard.com/report/mattn/go-isatty)
|
||||
|
||||
|
2
vendor/github.com/mattn/go-isatty/go.mod
generated
vendored
@@ -2,4 +2,4 @@ module github.com/mattn/go-isatty
|
||||
|
||||
go 1.12
|
||||
|
||||
require golang.org/x/sys v0.0.0-20191026070338-33540a1f6037
|
||||
require golang.org/x/sys v0.0.0-20200116001909-b77594299b42
|
||||
|
4
vendor/github.com/mattn/go-isatty/go.sum
generated
vendored
@@ -1,2 +1,2 @@
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
|
12
vendor/github.com/mattn/go-isatty/go.test.sh
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
echo "" > coverage.txt
|
||||
|
||||
for d in $(go list ./... | grep -v vendor); do
|
||||
go test -race -coverprofile=profile.out -covermode=atomic "$d"
|
||||
if [ -f profile.out ]; then
|
||||
cat profile.out >> coverage.txt
|
||||
rm profile.out
|
||||
fi
|
||||
done
|
23
vendor/github.com/mattn/go-isatty/isatty_android.go
generated
vendored
@@ -1,23 +0,0 @@
|
||||
// +build android
|
||||
|
||||
package isatty
|
||||
|
||||
import (
|
||||
"syscall"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const ioctlReadTermios = syscall.TCGETS
|
||||
|
||||
// IsTerminal return true if the file descriptor is terminal.
|
||||
func IsTerminal(fd uintptr) bool {
|
||||
var termios syscall.Termios
|
||||
_, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
|
||||
return err == 0
|
||||
}
|
||||
|
||||
// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
|
||||
// terminal. This is also always false on this environment.
|
||||
func IsCygwinTerminal(fd uintptr) bool {
|
||||
return false
|
||||
}
|
12
vendor/github.com/mattn/go-isatty/isatty_bsd.go
generated
vendored
@@ -3,18 +3,12 @@
|
||||
|
||||
package isatty
|
||||
|
||||
import (
|
||||
"syscall"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const ioctlReadTermios = syscall.TIOCGETA
|
||||
import "golang.org/x/sys/unix"
|
||||
|
||||
// IsTerminal return true if the file descriptor is terminal.
|
||||
func IsTerminal(fd uintptr) bool {
|
||||
var termios syscall.Termios
|
||||
_, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0)
|
||||
return err == 0
|
||||
_, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
|
||||
|
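Note: the isatty_bsd.go hunk above swaps the raw ioctl syscall for unix.IoctlGetTermios from golang.org/x/sys; callers of the package are unaffected. A typical use of the public API is sketched below (an illustration, not code from this repository).

```go
package main

import (
	"fmt"
	"os"

	"github.com/mattn/go-isatty"
)

func main() {
	// Decide whether to emit ANSI colors based on where stdout points.
	if isatty.IsTerminal(os.Stdout.Fd()) || isatty.IsCygwinTerminal(os.Stdout.Fd()) {
		fmt.Println("stdout is a terminal: colors on")
	} else {
		fmt.Println("stdout is piped or redirected: colors off")
	}
}
```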
1
vendor/github.com/mattn/go-isatty/isatty_tcgets.go
generated
vendored
@@ -1,6 +1,5 @@
|
||||
// +build linux aix
|
||||
// +build !appengine
|
||||
// +build !android
|
||||
|
||||
package isatty
|
||||
|
||||
|
8
vendor/github.com/mattn/go-isatty/renovate.json
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"extends": [
|
||||
"config:base"
|
||||
],
|
||||
"postUpdateOptions": [
|
||||
"gomodTidy"
|
||||
]
|
||||
}
|
1
vendor/github.com/mgutz/ansi/.gitignore
generated
vendored
@@ -1 +0,0 @@
|
||||
*.test
|
9
vendor/github.com/mgutz/ansi/LICENSE
generated
vendored
@@ -1,9 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2013 Mario L. Gutierrez
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
Some files were not shown because too many files have changed in this diff.