Mirror of https://github.com/cheat/cheat.git (synced 2025-09-01 09:38:29 +02:00)

Compare commits: 82 commits
SHA1:
14f321b0e6
d3250fda79
c482488c41
fe8f39013e
1016b20ef2
def8985dcd
e6f12147df
a8c2c396ed
35262df4f2
12ffa4cb5c
d9c602f9e1
b67ff8b6a8
a500a621a1
23b6928874
9de39fb12b
ad501c4cbe
f17de401e5
2c097adeda
b825e0f535
8385277b28
768d55e5d4
6aedc5c116
e881bb1f97
501f9c66ad
a2aa82d9f3
018bce7ad5
17acefdd9b
37918e09a4
86967873a8
d237d98c15
eb9b3e7798
b0a351033d
1eb44e8809
55b18b4897
883a17092f
4f2a57fce8
ecc96c64f9
a81dd96ff4
fb538baba5
1a7b5c6127
cdddfbb516
4ef4c35d8c
a58294859e
606092e288
233a9de1aa
aa16f68620
367673d5d9
08fb9e11a9
3f4d4bddb2
6c6753b35c
0718b606e1
857119b443
f421483eea
4adddbf504
b9c86b6975
0b21ccf6f8
a3ad8c5101
bacb74929a
82e1c27494
45beeb2edb
c2c479b36c
cb0243e7fc
e5d04d41ea
2474ea4fb1
7467c9fbc0
dfba3da003
ad7ad64a75
c4dcfd5da0
278a5d9154
9fa0c466fd
4e9b2928b3
fa5eb44be8
49afd7c16b
59d5c96c24
8e602b0e93
fb04cb1fcd
d42726101e
93b3a711f5
9c3d41c8bd
4eeec6c868
1b17ab1914
477650ee44
.github/dependabot.yml (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
version: 2
updates:
  - package-ecosystem: gomod
    directory: "/"
    schedule:
      interval: daily
    open-pull-requests-limit: 10
    ignore:
      - dependency-name: github.com/alecthomas/chroma
        versions:
          - 0.9.1
.github/workflows/build.yml (vendored, new file, 57 lines)
@@ -0,0 +1,57 @@
name: Go

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  # TODO: is it possible to DRY out these jobs? Aside from `runs-on`, they are
  # identical.
  build-linux:
    runs-on: [ ubuntu-latest ]

    steps:
      - uses: actions/checkout@v2

      - name: Set up Go
        uses: actions/setup-go@v2
        with:
          go-version: 1.16

      - name: Set up Revive (linter)
        run: go get -u github.com/boyter/scc github.com/mgechev/revive
        env:
          GO111MODULE: off


      - name: Build
        run: make build

      - name: Test
        run: make test

  build-osx:
    runs-on: [ macos-latest ]

    steps:
      - uses: actions/checkout@v2

      - name: Set up Go
        uses: actions/setup-go@v2
        with:
          go-version: 1.16

      - name: Set up Revive (linter)
        run: go get -u github.com/boyter/scc github.com/mgechev/revive
        env:
          GO111MODULE: off

      - name: Build
        run: make build

      - name: Test
        run: make test

  # TODO: windows
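The TODO in this workflow asks whether the two near-identical jobs can be deduplicated. One possible answer, shown here only as an illustrative sketch and not part of this change set, is a build matrix that parameterizes the OS, assuming the steps really are identical across platforms:

```yaml
# Hypothetical alternative to the duplicated jobs above (not in this diff):
# a single job that runs once per OS listed in the matrix.
jobs:
  build:
    strategy:
      matrix:
        os: [ ubuntu-latest, macos-latest ]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - name: Set up Go
        uses: actions/setup-go@v2
        with:
          go-version: 1.16
      - name: Build
        run: make build
      - name: Test
        run: make test
```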
.github/workflows/codeql-analysis.yml (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
name: CodeQL

on:
  push:
    branches: [ master ]

  pull_request:
    branches: [ master ]

  schedule:
    - cron: '45 23 * * 0'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}

      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
.travis.yml (deleted, 15 lines)
@@ -1,15 +0,0 @@
language: go

go:
  - 1.14.x

os:
  - linux
  - osx

env:
  - GO111MODULE=on

install: true

script: make ci
@@ -35,6 +35,9 @@ Are you unable to do the above, but still want to contribute? You can help
`cheat` simply by telling others about it. Share it with friends and coworkers
that might benefit from using it.

#### Pull Requests ####
Please open all pull-requests against the `develop` branch.


[cheat]: https://github.com/cheat/cheat
[cheatsheets]: https://github.com/cheat/cheatsheets
Dockerfile (new file, 8 lines)
@@ -0,0 +1,8 @@
# NB: this image isn't used anywhere in the build pipeline. It exists to
# conveniently facilitate ad-hoc experimentation in a sandboxed environment
# during development.
FROM golang:1.15-alpine

RUN apk add git less make

WORKDIR /app
Makefile (43 changed lines)
@@ -7,6 +7,7 @@ dist_dir := ./dist
CAT := cat
COLUMN := column
CTAGS := ctags
DOCKER := docker
GO := go
GREP := grep
GZIP := gzip --best
@@ -20,6 +21,8 @@ SED := sed
SORT := sort
ZIP := zip -m

docker_image := cheat-devel:latest

# build flags
BUILD_FLAGS := -ldflags="-s -w" -mod vendor -trimpath
GOBIN :=
@@ -28,30 +31,33 @@ TMPDIR := /tmp
# release binaries
releases := \
$(dist_dir)/cheat-darwin-amd64 \
$(dist_dir)/cheat-linux-386 \
$(dist_dir)/cheat-linux-amd64 \
$(dist_dir)/cheat-linux-arm5 \
$(dist_dir)/cheat-linux-arm6 \
$(dist_dir)/cheat-linux-arm7 \
$(dist_dir)/cheat-linux-arm64 \
$(dist_dir)/cheat-windows-amd64.exe

## build: build an executable for your architecture
.PHONY: build
build: $(dist_dir) clean vendor generate man
build: $(dist_dir) clean fmt lint vet vendor generate man
$(GO) build $(BUILD_FLAGS) -o $(dist_dir)/cheat $(cmd_dir)

## build-release: build release executables
.PHONY: build-release
build-release: $(releases)

## ci: build a "release" executable for the current architecture (used in ci)
.PHONY: ci
ci: | setup prepare build

# cheat-darwin-amd64
$(dist_dir)/cheat-darwin-amd64: prepare
GOARCH=amd64 GOOS=darwin \
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz

# cheat-linux-386
$(dist_dir)/cheat-linux-386: prepare
GOARCH=386 GOOS=linux \
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz

# cheat-linux-amd64
$(dist_dir)/cheat-linux-amd64: prepare
GOARCH=amd64 GOOS=linux \
@@ -72,10 +78,15 @@ $(dist_dir)/cheat-linux-arm7: prepare
GOARCH=arm GOOS=linux GOARM=7 \
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz

# cheat-linux-arm64
$(dist_dir)/cheat-linux-arm64: prepare
GOARCH=arm64 GOOS=linux \
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz

# cheat-windows-amd64
$(dist_dir)/cheat-windows-amd64.exe: prepare
GOARCH=amd64 GOOS=windows \
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(ZIP) $@.zip $@
$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(ZIP) $@.zip $@ -j

# ./dist
$(dist_dir):
@@ -99,6 +110,7 @@ clean: $(dist_dir)
.PHONY: distclean
distclean:
$(RM) -f tags
@$(DOCKER) image rm -f $(docker_image)

## setup: install revive (linter) and scc (sloc tool)
.PHONY: setup
@@ -126,6 +138,10 @@ man:
vendor:
$(GO) mod vendor && $(GO) mod tidy && $(GO) mod verify

## vendor-update: update vendored dependencies
vendor-update:
$(GO) get -t -u ./... && $(GO) mod vendor

## fmt: run go fmt
.PHONY: fmt
fmt:
@@ -159,6 +175,21 @@ check: | vendor fmt lint vet test
.PHONY: prepare
prepare: | $(dist_dir) clean generate vendor fmt lint vet test

## docker-setup: create a docker image for use during development
.PHONY: docker-setup
docker-setup:
$(DOCKER) build -t $(docker_image) -f Dockerfile .

## docker-run: shell into the development docker container
.PHONY: docker-run
docker-run:
$(DOCKER) run -v `pwd`:/app -ti $(docker_image) sh

## docker-sh: shell into the docker development container
.PHONY: docker-sh
docker-sh:
$(DOCKER) run -v $(shell pwd):/app -ti $(docker_image) /bin/ash

## help: display this help text
.PHONY: help
help:
README.md (35 changed lines)
@@ -1,8 +1,9 @@



cheat
=====

[](https://travis-ci.com/cheat/cheat)

`cheat` allows you to create and view interactive cheatsheets on the
command-line. It was designed to help remind \*nix system administrators of
options for commands that they use frequently, but not frequently enough to
@@ -47,17 +48,17 @@ Installing
`cheat` has no dependencies. To install it, download the executable from the
[releases][] page and place it on your `PATH`.

Alternatively, if you have [go][] installed, you may install `cheat` using `go
get`:

```sh
go get -u github.com/cheat/cheat/cmd/cheat
```

Configuring
-----------
### conf.yml ###
`cheat` is configured by a YAML file that will be auto-generated on first run.
Should you need to create a config file manually, you can do
so via:

```sh
mkdir -p ~/.config/cheat && cheat --init > ~/.config/cheat/conf.yml
```

By default, the config file is assumed to exist on an XDG-compliant
configuration path like `~/.config/cheat/conf.yml`. If you would like to store
@@ -94,11 +95,26 @@ The `cheat` executable includes no cheatsheets, but [community-sourced
cheatsheets are available][cheatsheets]. You will be asked if you would like to
install the community-sourced cheatsheets the first time you run `cheat`.

### Script ###
You can manage the cheatsheets via a script `cheatsheets`.

#### Download and install ####
```sh
mkdir -p ~/.local/bin
wget -O ~/.local/bin/cheatsheets https://raw.githubusercontent.com/cheat/cheat/master/scripts/git/cheatsheets
chmod +x ~/.local/bin/cheatsheets
```

#### Pull changes ####
To pull the community and personal cheatsheets call `cheatsheets pull`

#### Push changes ####
To push your personal cheatsheets call `cheatsheets push`

Cheatpaths
----------
Cheatsheets are stored on "cheatpaths", which are directories that contain
cheetsheets. Cheatpaths are specified in the `conf.yml` file.
cheatsheets. Cheatpaths are specified in the `conf.yml` file.

It can be useful to configure `cheat` against multiple cheatpaths. A common
pattern is to store cheatsheets from multiple repositories on individual
@@ -211,3 +227,4 @@ Additionally, `cheat` supports enhanced autocompletion via integration with
[cheatsheets]: https://github.com/cheat/cheatsheets
[completions]: https://github.com/cheat/cheat/tree/master/scripts
[fzf]: https://github.com/junegunn/fzf
[go]: https://golang.org
@@ -1,3 +1,4 @@
//go:build ignore
// +build ignore

// This script embeds `docopt.txt and `conf.yml` into the binary during at
@@ -5,13 +6,11 @@

package main


import (
    "fmt"
    "io/ioutil"
    "log"
    "os"
    "path"
    "path/filepath"
)

@@ -52,10 +51,10 @@ func main() {
    for _, file := range files {

        // delete the outfile
        os.Remove(path.Join(root, file.Out))
        os.Remove(filepath.Join(root, file.Out))

        // read the static template
        bytes, err := ioutil.ReadFile(path.Join(root, file.In))
        bytes, err := ioutil.ReadFile(filepath.Join(root, file.In))
        if err != nil {
            log.Fatal(err)
        }
@@ -64,7 +63,7 @@ func main() {
        data := template(file.Method, string(bytes))

        // write the file to the specified outpath
        spath := path.Join(root, file.Out)
        spath := filepath.Join(root, file.Out)
        err = ioutil.WriteFile(spath, []byte(data), 0644)
        if err != nil {
            log.Fatal(err)
@@ -1,18 +1,20 @@
package main

import (
"bytes"
"fmt"
"os"
"text/tabwriter"

"github.com/cheat/cheat/internal/config"
"github.com/cheat/cheat/internal/display"
)

// cmdDirectories lists the configured cheatpaths.
func cmdDirectories(opts map[string]interface{}, conf config.Config) {

// initialize a tabwriter to produce cleanly columnized output
w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
var out bytes.Buffer
w := tabwriter.NewWriter(&out, 0, 0, 1, ' ', 0)

// generate sorted, columnized output
for _, path := range conf.Cheatpaths {
@@ -25,4 +27,5 @@ func cmdDirectories(opts map[string]interface{}, conf config.Config) {

// write columnized output to stdout
w.Flush()
display.Write(out.String(), conf)
}
@@ -4,7 +4,7 @@ import (
"fmt"
"os"
"os/exec"
"path"
"path/filepath"
"strings"

"github.com/cheat/cheat/internal/cheatpath"
@@ -58,10 +58,10 @@ func cmdEdit(opts map[string]interface{}, conf config.Config) {
}

// compute the new edit path
editpath = path.Join(writepath.Path, sheet.Title)
editpath = filepath.Join(writepath.Path, sheet.Title)

// create any necessary subdirectories
dirs := path.Dir(editpath)
dirs := filepath.Dir(editpath)
if dirs != "." {
if err := os.MkdirAll(dirs, 0755); err != nil {
fmt.Fprintf(os.Stderr, "failed to create directory: %s, %v\n", dirs, err)
@@ -87,10 +87,10 @@ func cmdEdit(opts map[string]interface{}, conf config.Config) {
}

// compute the new edit path
editpath = path.Join(writepath.Path, cheatsheet)
editpath = filepath.Join(writepath.Path, cheatsheet)

// create any necessary subdirectories
dirs := path.Dir(editpath)
dirs := filepath.Dir(editpath)
if dirs != "." {
if err := os.MkdirAll(dirs, 0755); err != nil {
fmt.Fprintf(os.Stderr, "failed to create directory: %s, %v\n", dirs, err)
@@ -3,7 +3,7 @@ package main
import (
"fmt"
"os"
"path"
"path/filepath"
"runtime"
"strings"

@@ -42,11 +42,11 @@ func cmdInit() {
// determine the appropriate paths for config data and (optional) community
// cheatsheets based on the user's platform
confpath := confpaths[0]
confdir := path.Dir(confpath)
confdir := filepath.Dir(confpath)

// create paths for community and personal cheatsheets
community := path.Join(confdir, "/cheatsheets/community")
personal := path.Join(confdir, "/cheatsheets/personal")
community := filepath.Join(confdir, "cheatsheets", "community")
personal := filepath.Join(confdir, "cheatsheets", "personal")

// template the above paths into the default configs
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
@@ -1,6 +1,7 @@
package main

import (
"bytes"
"fmt"
"os"
"regexp"
@@ -9,6 +10,7 @@ import (
"text/tabwriter"

"github.com/cheat/cheat/internal/config"
"github.com/cheat/cheat/internal/display"
"github.com/cheat/cheat/internal/sheet"
"github.com/cheat/cheat/internal/sheets"
)
@@ -23,7 +25,7 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
os.Exit(1)
}

// filter cheatcheats by tag if --tag was provided
// filter cheatsheets by tag if --tag was provided
if opts["--tag"] != nil {
cheatsheets = sheets.Filter(
cheatsheets,
@@ -35,8 +37,8 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
// sheets with local sheets), here we simply want to create a slice
// containing all sheets.
flattened := []sheet.Sheet{}
for _, pathSheets := range cheatsheets {
for _, s := range pathSheets {
for _, pathsheets := range cheatsheets {
for _, s := range pathsheets {
flattened = append(flattened, s)
}
}
@@ -79,16 +81,19 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
flattened = filtered
}

// exit early if no cheatsheets are available
// return exit code 2 if no cheatsheets are available
if len(flattened) == 0 {
os.Exit(0)
os.Exit(2)
}

// initialize a tabwriter to produce cleanly columnized output
w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
var out bytes.Buffer
w := tabwriter.NewWriter(&out, 0, 0, 1, ' ', 0)

// write a header row
fmt.Fprintln(w, "title:\tfile:\ttags:")

// generate sorted, columnized output
fmt.Fprintln(w, "title:\tfile:\ttags:")
for _, sheet := range flattened {
fmt.Fprintln(w, fmt.Sprintf(
"%s\t%s\t%s",
@@ -100,4 +105,5 @@ func cmdList(opts map[string]interface{}, conf config.Config) {

// write columnized output to stdout
w.Flush()
display.Write(out.String(), conf)
}
@@ -37,8 +37,8 @@ func cmdRemove(opts map[string]interface{}, conf config.Config) {
// fail early if the requested cheatsheet does not exist
sheet, ok := consolidated[cheatsheet]
if !ok {
fmt.Fprintln(os.Stderr, fmt.Sprintf("no cheatsheet found for '%s'.\n", cheatsheet))
os.Exit(1)
fmt.Fprintln(os.Stderr, fmt.Sprintf("No cheatsheet found for '%s'.\n", cheatsheet))
os.Exit(2)
}

// fail early if the sheet is read-only
@@ -7,7 +7,7 @@ import (
"strings"

"github.com/cheat/cheat/internal/config"
"github.com/cheat/cheat/internal/sheet"
"github.com/cheat/cheat/internal/display"
"github.com/cheat/cheat/internal/sheets"
)

@@ -31,68 +31,65 @@ func cmdSearch(opts map[string]interface{}, conf config.Config) {
)
}

// consolidate the cheatsheets found on all paths into a single map of
// `title` => `sheet` (ie, allow more local cheatsheets to override less
// local cheatsheets)
consolidated := sheets.Consolidate(cheatsheets)
// iterate over each cheatpath
out := ""
for _, pathcheats := range cheatsheets {

// if <cheatsheet> was provided, search that single sheet only
if opts["<cheatsheet>"] != nil {
// sort the cheatsheets alphabetically, and search for matches
for _, sheet := range sheets.Sort(pathcheats) {

cheatsheet := opts["<cheatsheet>"].(string)
// if <cheatsheet> was provided, constrain the search only to
// matching cheatsheets
if opts["<cheatsheet>"] != nil && sheet.Title != opts["<cheatsheet>"] {
continue
}

// assert that the cheatsheet exists
s, ok := consolidated[cheatsheet]
if !ok {
fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
os.Exit(0)
}
// assume that we want to perform a case-insensitive search for <phrase>
pattern := "(?i)" + phrase

consolidated = map[string]sheet.Sheet{
cheatsheet: s,
// unless --regex is provided, in which case we pass the regex unaltered
if opts["--regex"] == true {
pattern = phrase
}

// compile the regex
reg, err := regexp.Compile(pattern)
if err != nil {
fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to compile regexp: %s, %v", pattern, err))
os.Exit(1)
}

// `Search` will return text entries that match the search terms. We're
// using it here to overwrite the prior cheatsheet Text, filtering it to
// only what is relevant
sheet.Text = sheet.Search(reg)

// if the sheet did not match the search, ignore it and move on
if sheet.Text == "" {
continue
}

// if colorization was requested, apply it here
if conf.Color(opts) {
sheet.Colorize(conf)
}

// display the cheatsheet title and path
out += fmt.Sprintf("%s %s\n",
display.Underline(sheet.Title),
display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
)

// indent each line of content
out += display.Indent(sheet.Text) + "\n"
}
}

// sort the cheatsheets alphabetically, and search for matches
for _, sheet := range sheets.Sort(consolidated) {
// trim superfluous newlines
out = strings.TrimSpace(out)

// assume that we want to perform a case-insensitive search for <phrase>
pattern := "(?i)" + phrase

// unless --regex is provided, in which case we pass the regex unaltered
if opts["--regex"] == true {
pattern = phrase
}

// compile the regex
reg, err := regexp.Compile(pattern)
if err != nil {
fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to compile regexp: %s, %v", pattern, err))
os.Exit(1)
}

// `Search` will return text entries that match the search terms. We're
// using it here to overwrite the prior cheatsheet Text, filtering it to
// only what is relevant
sheet.Text = sheet.Search(reg)

// if the sheet did not match the search, ignore it and move on
if sheet.Text == "" {
continue
}

// if colorization was requested, apply it here
if conf.Color(opts) {
sheet.Colorize(conf)
}

// output the cheatsheet title
fmt.Printf("%s:\n", sheet.Title)

// indent each line of content with two spaces
for _, line := range strings.Split(sheet.Text, "\n") {
fmt.Printf("  %s\n", line)
}
fmt.Println("")
}
// display the output
// NB: resist the temptation to call `display.Display` multiple times in
// the loop above. That will not play nicely with the paginator.
display.Write(out, conf)
}
@@ -5,6 +5,7 @@ import (
"os"

"github.com/cheat/cheat/internal/config"
"github.com/cheat/cheat/internal/display"
"github.com/cheat/cheat/internal/sheets"
)

@@ -18,8 +19,12 @@ func cmdTags(opts map[string]interface{}, conf config.Config) {
os.Exit(1)
}

// write sheet tags to stdout
// assemble the output
out := ""
for _, tag := range sheets.Tags(cheatsheets) {
fmt.Println(tag)
out += fmt.Sprintln(tag)
}

// display the output
display.Write(out, conf)
}
@@ -6,6 +6,7 @@ import (
"strings"

"github.com/cheat/cheat/internal/config"
"github.com/cheat/cheat/internal/display"
"github.com/cheat/cheat/internal/sheets"
)

@@ -29,16 +30,46 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
)
}

// consolidate the cheatsheets found on all paths into a single map of
// `title` => `sheet` (ie, allow more local cheatsheets to override less
// local cheatsheets)
// if --all was passed, display cheatsheets from all cheatpaths
if opts["--all"].(bool) {
// iterate over the cheatpaths
out := ""
for _, cheatpath := range cheatsheets {

// if the cheatpath contains the specified cheatsheet, display it
if sheet, ok := cheatpath[cheatsheet]; ok {

// identify the matching cheatsheet
out += fmt.Sprintf("%s %s\n",
display.Underline(sheet.Title),
display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
)

// apply colorization if requested
if conf.Color(opts) {
sheet.Colorize(conf)
}

// display the cheatsheet
out += display.Indent(sheet.Text) + "\n"
}
}

// display and exit
display.Write(strings.TrimSuffix(out, "\n"), conf)
os.Exit(0)
}

// otherwise, consolidate the cheatsheets found on all paths into a single
// map of `title` => `sheet` (ie, allow more local cheatsheets to override
// less local cheatsheets)
consolidated := sheets.Consolidate(cheatsheets)

// fail early if the requested cheatsheet does not exist
sheet, ok := consolidated[cheatsheet]
if !ok {
fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
os.Exit(0)
os.Exit(2)
}

// apply colorization if requested
@@ -47,5 +78,5 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
}

// display the cheatsheet
fmt.Print(sheet.Text)
display.Write(sheet.Text, conf)
}
@@ -3,11 +3,12 @@ Usage:

Options:
    --init                  Write a default config file to stdout
    -a --all                Search among all cheatpaths
    -c --colorize           Colorize output
    -d --directories        List cheatsheet directories
    -e --edit=<cheatsheet>  Edit <cheatsheet>
    -l --list               List cheatsheets
    -p --path=<name>        Return only sheets found on path <name>
    -p --path=<name>        Return only sheets found on cheatpath <name>
    -r --regex              Treat search <phrase> as a regex
    -s --search=<phrase>    Search cheatsheets for <phrase>
    -t --tag=<tag>          Return only sheets matching <tag>
@@ -5,7 +5,6 @@ package main
import (
"fmt"
"os"
"path"
"runtime"
"strings"

@@ -17,12 +16,12 @@ import (
"github.com/cheat/cheat/internal/installer"
)

const version = "3.9.0"
const version = "4.2.5"

func main() {

// initialize options
opts, err := docopt.Parse(usage(), nil, true, version, false)
opts, err := docopt.ParseArgs(usage(), nil, version)
if err != nil {
// panic here, because this should never happen
panic(fmt.Errorf("docopt failed to parse: %v", err))
@@ -46,6 +45,9 @@ func main() {
envvars := map[string]string{}
for _, e := range os.Environ() {
pair := strings.SplitN(e, "=", 2)
if runtime.GOOS == "windows" {
pair[0] = strings.ToUpper(pair[0])
}
envvars[pair[0]] = pair[1]
}

@@ -74,62 +76,16 @@ func main() {
os.Exit(0)
}

// read the config template
configs := configs()

// determine the appropriate paths for config data and (optional) community
// cheatsheets based on the user's platform
// choose a confpath
confpath = confpaths[0]
confdir := path.Dir(confpath)

// create paths for community and personal cheatsheets
community := path.Join(confdir, "/cheatsheets/community")
personal := path.Join(confdir, "/cheatsheets/personal")

// template the above paths into the default configs
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)

// prompt the user to download the community cheatsheets
yes, err = installer.Prompt(
"Would you like to download the community cheatsheets? [Y/n]",
true,
)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
os.Exit(1)
}

// clone the community cheatsheets if so instructed
if yes {
// clone the community cheatsheets
if err := installer.Clone(community); err != nil {
fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
os.Exit(1)
}

// also create a directory for personal cheatsheets
if err := os.MkdirAll(personal, os.ModePerm); err != nil {
fmt.Fprintf(
os.Stderr,
"failed to create config: failed to create directory: %s: %v\n",
personal,
err)
os.Exit(1)
}
}

// the config file does not exist, so we'll try to create one
if err = config.Init(confpath, configs); err != nil {
fmt.Fprintf(
os.Stderr,
"failed to create config file: %s: %v\n",
confpath,
err,
)
// run the installer
if err := installer.Run(configs(), confpath); err != nil {
fmt.Fprintf(os.Stderr, "failed to run installer: %v\n", err)
os.Exit(1)
}

// notify the user and exit
fmt.Printf("Created config file: %s\n", confpath)
fmt.Println("Please read this file for advanced configuration information.")
os.Exit(0)
@@ -185,6 +141,9 @@ func main() {
case opts["<cheatsheet>"] != nil:
cmd = cmdView

case opts["--tag"] != nil && opts["--tag"].(string) != "":
cmd = cmdList

default:
fmt.Println(usage())
os.Exit(0)
@@ -9,19 +9,23 @@ import (
func configs() string {
return strings.TrimSpace(`---
# The editor to use with 'cheat -e <sheet>'. Defaults to $EDITOR or $VISUAL.
editor: vim
# editor: vim

# Should 'cheat' always colorize output?
colorize: true
colorize: false

# Which 'chroma' colorscheme should be applied to the output?
# Options are available here:
# https://github.com/alecthomas/chroma/tree/master/styles
style: monokai
# style: monokai

# Which 'chroma' "formatter" should be applied?
# One of: "terminal", "terminal256", "terminal16m"
formatter: terminal16m
formatter: terminal

# Through which pager should output be piped? (Unset this key for no pager.)
pager: more
# pager: less -FRX # <- recommended where available

# The paths at which cheatsheets are available. Tags associated with a cheatpath
# are automatically attached to all cheatsheets residing on that path.
@@ -12,11 +12,12 @@ func usage() string {

Options:
    --init                  Write a default config file to stdout
    -a --all                Search among all cheatpaths
    -c --colorize           Colorize output
    -d --directories        List cheatsheet directories
    -e --edit=<cheatsheet>  Edit <cheatsheet>
    -l --list               List cheatsheets
    -p --path=<name>        Return only sheets found on path <name>
    -p --path=<name>        Return only sheets found on cheatpath <name>
    -r --regex              Treat search <phrase> as a regex
    -s --search=<phrase>    Search cheatsheets for <phrase>
    -t --tag=<tag>          Return only sheets matching <tag>
@@ -1,18 +1,22 @@
---
# The editor to use with 'cheat -e <sheet>'. Defaults to $EDITOR or $VISUAL.
editor: vim
# editor: vim

# Should 'cheat' always colorize output?
colorize: true
colorize: false

# Which 'chroma' colorscheme should be applied to the output?
# Options are available here:
# https://github.com/alecthomas/chroma/tree/master/styles
style: monokai
# style: monokai

# Which 'chroma' "formatter" should be applied?
# One of: "terminal", "terminal256", "terminal16m"
formatter: terminal16m
formatter: terminal

# Through which pager should output be piped? (Unset this key for no pager.)
pager: more
# pager: less -FRX # <- recommended where available

# The paths at which cheatsheets are available. Tags associated with a cheatpath
# are automatically attached to all cheatsheets residing on that path.
doc/cheat.1 (55 changed lines)
@@ -1,4 +1,4 @@
.\" Automatically generated by Pandoc 1.17.2
.\" Automatically generated by Pandoc 2.2.1
.\"
.TH "CHEAT" "1" "" "" "General Commands Manual"
.hy
@@ -17,62 +17,62 @@ commands that they use frequently, but not frequently enough to
remember.
.SH OPTIONS
.TP
.B \-\-init
.B \[en]init
Print a config file to stdout.
.RS
.RE
.TP
.B \-c, \-\-colorize
.B \-c, \[en]colorize
Colorize output.
.RS
.RE
.TP
.B \-d, \-\-directories
.B \-d, \[en]directories
List cheatsheet directories.
.RS
.RE
.TP
.B \-e, \-\-edit=\f[I]CHEATSHEET\f[]
.B \-e, \[en]edit=\f[I]CHEATSHEET\f[]
Open \f[I]CHEATSHEET\f[] for editing.
.RS
.RE
.TP
.B \-l, \-\-list
.B \-l, \[en]list
List available cheatsheets.
.RS
.RE
.TP
.B \-p, \-\-path=\f[I]PATH\f[]
.B \-p, \[en]path=\f[I]PATH\f[]
Filter only to sheets found on path \f[I]PATH\f[].
.RS
.RE
.TP
.B \-r, \-\-regex
.B \-r, \[en]regex
Treat search \f[I]PHRASE\f[] as a regular expression.
.RS
.RE
.TP
.B \-s, \-\-search=\f[I]PHRASE\f[]
.B \-s, \[en]search=\f[I]PHRASE\f[]
Search cheatsheets for \f[I]PHRASE\f[].
.RS
.RE
.TP
.B \-t, \-\-tag=\f[I]TAG\f[]
.B \-t, \[en]tag=\f[I]TAG\f[]
Filter only to sheets tagged with \f[I]TAG\f[].
.RS
.RE
.TP
.B \-T, \-\-tags
.B \-T, \[en]tags
List all tags in use.
.RS
.RE
.TP
.B \-v, \-\-version
.B \-v, \[en]version
Print the version number.
.RS
.RE
.TP
.B \-\-rm=\f[I]CHEATSHEET\f[]
.B \[en]rm=\f[I]CHEATSHEET\f[]
Remove (deletes) \f[I]CHEATSHEET\f[].
.RS
.RE
@@ -88,7 +88,7 @@ cheat \-e \f[I]foo\f[]
.RS
.RE
.TP
.B To edit (or create) the foo/bar cheatsheet on the \[aq]work\[aq] cheatpath:
.B To edit (or create) the foo/bar cheatsheet on the `work' cheatpath:
cheat \-p \f[I]work\f[] \-e \f[I]foo/bar\f[]
.RS
.RE
@@ -103,7 +103,7 @@ cheat \-l
.RS
.RE
.TP
.B To list all cheatsheets whose titles match \[aq]apt\[aq]:
.B To list all cheatsheets whose titles match `apt':
cheat \-l \f[I]apt\f[]
.RS
.RE
@@ -113,23 +113,23 @@ cheat \-T
.RS
.RE
.TP
.B To list available cheatsheets that are tagged as \[aq]personal\[aq]:
.B To list available cheatsheets that are tagged as `personal':
cheat \-l \-t \f[I]personal\f[]
.RS
.RE
.TP
.B To search for \[aq]ssh\[aq] among all cheatsheets, and colorize matches:
.B To search for `ssh' among all cheatsheets, and colorize matches:
cheat \-c \-s \f[I]ssh\f[]
.RS
.RE
.TP
.B To search (by regex) for cheatsheets that contain an IP address:
cheat \-c \-r \-s \f[I]\[aq](?:[0\-9]{1,3}.){3}[0\-9]{1,3}\[aq]\f[]
cheat \-c \-r \-s \f[I]`(?:[0\-9]{1,3}.){3}[0\-9]{1,3}'\f[]
.RS
.RE
.TP
.B To remove (delete) the foo/bar cheatsheet:
cheat \-\-rm \f[I]foo/bar\f[]
cheat \[en]rm \f[I]foo/bar\f[]
.RS
.RE
.SH FILES
@@ -159,15 +159,15 @@ depending upon your platform:
\f[B]cheat\f[] will search in the order specified above.
The first \f[I]conf.yaml\f[] encountered will be respected.
.PP
If \f[B]cheat\f[] cannot locate a config file, it will ask if you\[aq]d
like to generate one automatically.
If \f[B]cheat\f[] cannot locate a config file, it will ask if you'd like
to generate one automatically.
Alternatively, you may also generate a config file manually by running
\f[B]cheat \-\-init\f[] and saving its output to the appropriate
\f[B]cheat \[en]init\f[] and saving its output to the appropriate
location for your platform.
.SS Cheatpaths
.PP
\f[B]cheat\f[] reads its cheatsheets from "cheatpaths", which are the
directories in which cheatsheets are stored.
\f[B]cheat\f[] reads its cheatsheets from \[lq]cheatpaths\[rq], which
are the directories in which cheatsheets are stored.
Cheatpaths may be configured in \f[I]conf.yaml\f[], and viewed via
\f[B]cheat \-d\f[].
.PP
@@ -203,6 +203,13 @@ If set, autocompletion scripts will attempt to integrate with
\f[B]fzf\f[].
.RS
.RE
.SH RETURN VALUES
.IP "0." 3
Successful termination
.IP "1." 3
Application error
.IP "2." 3
Cheatsheet(s) not found
.SH BUGS
.PP
See GitHub issues: <https://github.com/cheat/cheat/issues>
@@ -163,6 +163,15 @@ set, all other config paths will be ignored.

: If set, autocompletion scripts will attempt to integrate with **fzf**.

RETURN VALUES
=============

0. Successful termination

1. Application error

2. Cheatsheet(s) not found


BUGS
====
go.mod (9 changed lines)
@@ -3,16 +3,15 @@ module github.com/cheat/cheat
go 1.14

require (
github.com/alecthomas/chroma v0.7.1
github.com/alecthomas/chroma v0.10.0
github.com/davecgh/go-spew v1.1.1
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815
github.com/kr/text v0.2.0 // indirect
github.com/mattn/go-isatty v0.0.12
github.com/mattn/go-isatty v0.0.14
github.com/mitchellh/go-homedir v1.1.0
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
github.com/sergi/go-diff v1.1.0 // indirect
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 // indirect
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e // indirect
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0
gopkg.in/yaml.v2 v2.2.8
gopkg.in/yaml.v2 v2.4.0
)
go.sum (61 changed lines)
@@ -1,68 +1,37 @@
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
||||
github.com/alecthomas/chroma v0.7.1 h1:G1i02OhUbRi2nJxcNkwJaY/J1gHXj9tt72qN6ZouLFQ=
|
||||
github.com/alecthomas/chroma v0.7.1/go.mod h1:gHw09mkX1Qp80JlYbmN9L3+4R5o6DJJ3GRShh+AICNc=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||
github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek=
|
||||
github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
|
||||
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
|
||||
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
|
||||
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
|
||||
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
|
||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
|
||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e h1:CsOuNlbOuf0mzxJIefr6Q4uAUetRUwZE4qt7VfzP+xo=
|
||||
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0 h1:POO/ycCATvegFmVuPpQzZFJ+pGZeX22Ufu6fibxDVjU=
|
||||
gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
internal/config/color_test.go (new file, 22 lines)
@@ -0,0 +1,22 @@
package config

import (
    "testing"
)

// TestColor asserts that colorization rules are properly respected
func TestColor(t *testing.T) {

    // mock a config
    conf := Config{}

    opts := map[string]interface{}{"--colorize": false}
    if conf.Color(opts) {
        t.Errorf("failed to respect --colorize (false)")
    }

    opts = map[string]interface{}{"--colorize": true}
    if !conf.Color(opts) {
        t.Errorf("failed to respect --colorize (true)")
    }
}
@@ -4,7 +4,10 @@ import (
"fmt"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"

cp "github.com/cheat/cheat/internal/cheatpath"

@@ -19,6 +22,7 @@ type Config struct {
Cheatpaths []cp.Cheatpath `yaml:"cheatpaths"`
Style string `yaml:"style"`
Formatter string `yaml:"formatter"`
Pager string `yaml:"pager"`
}

// New returns a new Config struct
@@ -96,8 +100,22 @@ func New(opts map[string]interface{}, confPath string, resolve bool) (Config, er
conf.Editor = os.Getenv("VISUAL")
} else if os.Getenv("EDITOR") != "" {
conf.Editor = os.Getenv("EDITOR")
} else if runtime.GOOS == "windows" {
conf.Editor = "notepad"
} else {
return Config{}, fmt.Errorf("no editor set")
// try to fall back to `nano`
path, err := exec.LookPath("nano")
if err != nil {
return Config{}, fmt.Errorf("failed to locate nano: %s", err)
}

// use `nano` if we found it
if path != "" {
conf.Editor = "nano"
// otherwise, give up
} else {
return Config{}, fmt.Errorf("no editor set")
}
}
}

@@ -108,7 +126,13 @@ func New(opts map[string]interface{}, confPath string, resolve bool) (Config, er

// if a chroma formatter was not provided, set a default
if conf.Formatter == "" {
conf.Formatter = "terminal16m"
conf.Formatter = "terminal"
}

// attempt to fall back to `PAGER` if a pager is not specified in configs
conf.Pager = strings.TrimSpace(conf.Pager)
if conf.Pager == "" && os.Getenv("PAGER") != "" {
conf.Pager = os.Getenv("PAGER")
}

return conf, nil
@@ -39,17 +39,17 @@ func TestConfigSuccessful(t *testing.T) {
// assert that the cheatpaths are correct
want := []cheatpath.Cheatpath{
cheatpath.Cheatpath{
Path: filepath.Join(home, ".dotfiles/cheat/community"),
Path: filepath.Join(home, ".dotfiles", "cheat", "community"),
ReadOnly: true,
Tags: []string{"community"},
},
cheatpath.Cheatpath{
Path: filepath.Join(home, ".dotfiles/cheat/work"),
Path: filepath.Join(home, ".dotfiles", "cheat", "work"),
ReadOnly: false,
Tags: []string{"work"},
},
cheatpath.Cheatpath{
Path: filepath.Join(home, ".dotfiles/cheat/personal"),
Path: filepath.Join(home, ".dotfiles", "cheat", "personal"),
ReadOnly: false,
Tags: []string{"personal"},
},
@@ -85,8 +85,8 @@ func TestEmptyEditor(t *testing.T) {

// initialize a config
conf, err := New(map[string]interface{}{}, mock.Path("conf/empty.yml"), false)
if err == nil {
t.Errorf("failed to return an error on empty editor")
if err != nil {
t.Errorf("failed to initialize test: %v", err)
}

// set editor, and assert that it is respected
internal/config/init_test.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package config

import (
    "io/ioutil"
    "os"
    "testing"
)

// TestInit asserts that configs are properly initialized
func TestInit(t *testing.T) {

    // initialize a temporary config file
    confFile, err := ioutil.TempFile("", "cheat-test")
    if err != nil {
        t.Errorf("failed to create temp file: %v", err)
    }

    // clean up the temp file
    defer os.Remove(confFile.Name())

    // initialize the config file
    conf := "mock config data"
    if err = Init(confFile.Name(), conf); err != nil {
        t.Errorf("failed to init config file: %v", err)
    }

    // read back the config file contents
    bytes, err := ioutil.ReadFile(confFile.Name())
    if err != nil {
        t.Errorf("failed to read config file: %v", err)
    }

    // assert that the contents were written correctly
    got := string(bytes)
    if got != conf {
        t.Errorf("failed to write configs: want: %s, got: %s", conf, got)
    }
}
internal/config/path_test.go (new file, 53 lines)
@@ -0,0 +1,53 @@
package config

import (
    "io/ioutil"
    "os"
    "testing"
)

// TestPathConfigNotExists asserts that `Path` identifies non-existent config
// files
func TestPathConfigNotExists(t *testing.T) {

    // package (invalid) cheatpaths
    paths := []string{"/cheat-test-conf-does-not-exist"}

    // assert
    if _, err := Path(paths); err == nil {
        t.Errorf("failed to identify non-existent config file")
    }

}

// TestPathConfigExists asserts that `Path` identifies existent config files
func TestPathConfigExists(t *testing.T) {

    // initialize a temporary config file
    confFile, err := ioutil.TempFile("", "cheat-test")
    if err != nil {
        t.Errorf("failed to create temp file: %v", err)
    }

    // clean up the temp file
    defer os.Remove(confFile.Name())

    // package cheatpaths
    paths := []string{
        "/cheat-test-conf-does-not-exist",
        confFile.Name(),
    }

    // assert
    got, err := Path(paths)
    if err != nil {
        t.Errorf("failed to identify config file: %v", err)
    }
    if got != confFile.Name() {
        t.Errorf(
            "failed to return config path: want: %s, got: %s",
            confFile.Name(),
            got,
        )
    }
}
@@ -2,7 +2,7 @@ package config

import (
"fmt"
"path"
"path/filepath"

"github.com/mitchellh/go-homedir"
)
@@ -28,25 +28,25 @@ func Paths(
}

switch sys {
case "darwin", "linux", "freebsd":
case "android", "darwin", "linux", "freebsd":
paths := []string{}

// don't include the `XDG_CONFIG_HOME` path if that envvar is not set
if xdgpath, ok := envvars["XDG_CONFIG_HOME"]; ok {
paths = append(paths, path.Join(xdgpath, "/cheat/conf.yml"))
paths = append(paths, filepath.Join(xdgpath, "cheat", "conf.yml"))
}

// if `XDG_CONFIG_HOME` is not set, search the user's home directory
paths = append(paths, []string{
path.Join(home, ".config/cheat/conf.yml"),
path.Join(home, ".cheat/conf.yml"),
filepath.Join(home, ".config", "cheat", "conf.yml"),
filepath.Join(home, ".cheat", "conf.yml"),
"/etc/cheat/conf.yml",
}...)

return paths, nil
case "windows":
return []string{
path.Join(envvars["APPDATA"], "/cheat/conf.yml"),
path.Join(envvars["PROGRAMDATA"], "/cheat/conf.yml"),
filepath.Join(envvars["APPDATA"], "cheat", "conf.yml"),
filepath.Join(envvars["PROGRAMDATA"], "cheat", "conf.yml"),
}, nil
default:
return []string{}, fmt.Errorf("unsupported os: %s", sys)
@@ -21,6 +21,7 @@ func TestValidatePathsNix(t *testing.T) {

// specify the platforms to test
oses := []string{
"android",
"darwin",
"freebsd",
"linux",
@@ -39,6 +40,7 @@ func TestValidatePathsNix(t *testing.T) {
"/home/bar/cheat/conf.yml",
"/home/foo/.config/cheat/conf.yml",
"/home/foo/.cheat/conf.yml",
"/etc/cheat/conf.yml",
}

// assert that output matches expectations
@@ -81,6 +83,7 @@ func TestValidatePathsNixNoXDG(t *testing.T) {
want := []string{
"/home/foo/.config/cheat/conf.yml",
"/home/foo/.cheat/conf.yml",
"/etc/cheat/conf.yml",
}

// assert that output matches expectations
internal/display/faint.go (new file, 18 lines)
@@ -0,0 +1,18 @@
package display

import (
    "fmt"

    "github.com/cheat/cheat/internal/config"
)

// Faint returns an faint string
func Faint(str string, conf config.Config) string {
    // make `str` faint only if colorization has been requested
    if conf.Colorize {
        return fmt.Sprintf(fmt.Sprintf("\033[2m%s\033[0m", str))
    }

    // otherwise, return the string unmodified
    return str
}
internal/display/faint_test.go (Normal file, 27 lines)
@@ -0,0 +1,27 @@
package display

import (
    "testing"

    "github.com/cheat/cheat/internal/config"
)

// TestFaint asserts that Faint applies faint formatting
func TestFaint(t *testing.T) {

    // case: apply colorization
    conf := config.Config{Colorize: true}
    want := "\033[2mfoo\033[0m"
    got := Faint("foo", conf)
    if want != got {
        t.Errorf("failed to faint: want: %s, got: %s", want, got)
    }

    // case: do not apply colorization
    conf.Colorize = false
    want = "foo"
    got = Faint("foo", conf)
    if want != got {
        t.Errorf("failed to faint: want: %s, got: %s", want, got)
    }
}
internal/display/indent.go (Normal file, 21 lines)
@@ -0,0 +1,21 @@
package display

import (
    "fmt"
    "strings"
)

// Indent prepends each line of a string with a tab
func Indent(str string) string {

    // trim superfluous whitespace
    str = strings.TrimSpace(str)

    // prepend each line with a tab character
    out := ""
    for _, line := range strings.Split(str, "\n") {
        out += fmt.Sprintf("\t%s\n", line)
    }

    return out
}
internal/display/indent_test.go (Normal file, 12 lines)
@@ -0,0 +1,12 @@
package display

import "testing"

// TestIndent asserts that Indent prepends a tab to each line
func TestIndent(t *testing.T) {
    got := Indent("foo\nbar\nbaz")
    want := "\tfoo\n\tbar\n\tbaz\n"
    if got != want {
        t.Errorf("failed to indent: want: %s, got: %s", want, got)
    }
}
internal/display/underline.go (Normal file, 8 lines)
@@ -0,0 +1,8 @@
package display

import "fmt"

// Underline returns an underlined string
func Underline(str string) string {
    return fmt.Sprintf(fmt.Sprintf("\033[4m%s\033[0m", str))
}
internal/display/underline_test.go (Normal file, 14 lines)
@@ -0,0 +1,14 @@
package display

import (
    "testing"
)

// TestUnderline asserts that Underline applies underline formatting
func TestUnderline(t *testing.T) {
    want := "\033[4mfoo\033[0m"
    got := Underline("foo")
    if want != got {
        t.Errorf("failed to underline: want: %s, got: %s", want, got)
    }
}
internal/display/write.go (Normal file, 37 lines)
@@ -0,0 +1,37 @@
package display

import (
    "fmt"
    "os"
    "os/exec"
    "strings"

    "github.com/cheat/cheat/internal/config"
)

// Write writes output either directly to stdout, or through a pager,
// depending upon configuration.
func Write(out string, conf config.Config) {
    // if no pager was configured, print the output to stdout and exit
    if conf.Pager == "" {
        fmt.Print(out)
        os.Exit(0)
    }

    // otherwise, pipe output through the pager
    parts := strings.Split(conf.Pager, " ")
    pager := parts[0]
    args := parts[1:]

    // run the pager
    cmd := exec.Command(pager, args...)
    cmd.Stdin = strings.NewReader(out)
    cmd.Stdout = os.Stdout

    // handle errors
    err := cmd.Run()
    if err != nil {
        fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to write to pager: %v", err))
        os.Exit(1)
    }
}
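A brief usage sketch for the new `display.Write` helper follows. The `main` function below is hypothetical; it assumes only the `Pager` field referenced in the listing above and that a `less` binary is on the PATH. With an empty `Pager`, `Write` prints to stdout and exits; otherwise the configured pager string is split on spaces into a command and its arguments:

package main

import (
    "github.com/cheat/cheat/internal/config"
    "github.com/cheat/cheat/internal/display"
)

func main() {
    // "less -R" is split into the pager binary ("less") and its argument ("-R")
    conf := config.Config{Pager: "less -R"}
    display.Write("# tar\n\ntar -czf archive.tgz dir/\n", conf)
}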
@@ -8,8 +8,8 @@ import (

const cloneURL = "https://github.com/cheat/cheatsheets.git"

// Clone clones the community cheatsheets
func Clone(path string) error {
// clone clones the community cheatsheets
func clone(path string) error {

    // perform the clone in a shell
    cmd := exec.Command("git", "clone", cloneURL, path)

@@ -23,7 +23,7 @@ func Prompt(prompt string, def bool) (bool, error) {
    }

    // normalize the answer
    ans = strings.ToLower(strings.TrimRight(ans, "\n"))
    ans = strings.ToLower(strings.TrimSpace(ans))

    // return the appropriate response
    switch ans {
internal/installer/run.go (Normal file, 57 lines)
@@ -0,0 +1,57 @@
package installer

import (
    "fmt"
    "os"
    "path/filepath"
    "strings"

    "github.com/cheat/cheat/internal/config"
)

// Run runs the installer
func Run(configs string, confpath string) error {

    // determine the appropriate paths for config data and (optional) community
    // cheatsheets based on the user's platform
    confdir := filepath.Dir(confpath)

    // create paths for community and personal cheatsheets
    community := filepath.Join(confdir, "cheatsheets", "community")
    personal := filepath.Join(confdir, "cheatsheets", "personal")

    // template the above paths into the default configs
    configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
    configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)

    // prompt the user to download the community cheatsheets
    yes, err := Prompt(
        "Would you like to download the community cheatsheets? [Y/n]",
        true,
    )
    if err != nil {
        return fmt.Errorf("failed to prompt: %v", err)
    }

    // clone the community cheatsheets if so instructed
    if yes {
        // clone the community cheatsheets
        fmt.Printf("Cloning community cheatsheets to %s.\n", community)
        if err := clone(community); err != nil {
            return fmt.Errorf("failed to clone cheatsheets: %v", err)
        }

        // also create a directory for personal cheatsheets
        fmt.Printf("Cloning personal cheatsheets to %s.\n", personal)
        if err := os.MkdirAll(personal, os.ModePerm); err != nil {
            return fmt.Errorf("failed to create directory: %v", err)
        }
    }

    // the config file does not exist, so we'll try to create one
    if err = config.Init(confpath, configs); err != nil {
        return fmt.Errorf("failed to create config file: %v", err)
    }

    return nil
}
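The `COMMUNITY_PATH`/`PERSONAL_PATH` substitution in `Run` is plain string templating over the default config text. A self-contained sketch of just that step is shown below; the YAML snippet and the config directory here are illustrative, not the shipped defaults:

package main

import (
    "fmt"
    "path/filepath"
    "strings"
)

func main() {
    configs := "cheatpaths:\n  - name: community\n    path: COMMUNITY_PATH\n  - name: personal\n    path: PERSONAL_PATH\n"

    confdir := "/home/foo/.config/cheat"
    community := filepath.Join(confdir, "cheatsheets", "community")
    personal := filepath.Join(confdir, "cheatsheets", "personal")

    // same Replace calls as installer.Run above
    configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
    configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)

    fmt.Print(configs)
}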
@@ -13,7 +13,7 @@ func Path(filename string) string {
    // determine the path of this file during runtime
    _, thisfile, _, _ := runtime.Caller(0)

    // compute the config path
    // compute the mock path
    file, err := filepath.Abs(
        path.Join(
            filepath.Dir(thisfile),

@@ -22,7 +22,7 @@ func Path(filename string) string {
        ),
    )
    if err != nil {
        panic(fmt.Errorf("failed to resolve config path: %v", err))
        panic(fmt.Errorf("failed to resolve mock path: %v", err))
    }

    return file
internal/sheet/colorize_test.go (Normal file, 34 lines)
@@ -0,0 +1,34 @@
package sheet

import (
    "testing"

    "github.com/cheat/cheat/internal/config"
)

// TestColorize asserts that syntax-highlighting is correctly applied
func TestColorize(t *testing.T) {

    // mock configs
    conf := config.Config{
        Formatter: "terminal16m",
        Style:     "solarized-dark",
    }

    // mock a sheet
    s := Sheet{
        Text: "echo 'foo'",
    }

    // colorize the sheet text
    s.Colorize(conf)

    // initialize expectations
    want := "[38;2;181;137;0mecho[0m[38;2;147;161;161m"
    want += " [0m[38;2;42;161;152m'foo'[0m"

    // assert
    if s.Text != want {
        t.Errorf("failed to colorize sheet: want: %s, got: %s", want, s.Text)
    }
}
@@ -4,7 +4,7 @@ import (
    "fmt"
    "io"
    "os"
    "path"
    "path/filepath"
)

// Copy copies a cheatsheet to a new location

@@ -22,7 +22,7 @@ func (s *Sheet) Copy(dest string) error {
    defer infile.Close()

    // create any necessary subdirectories
    dirs := path.Dir(dest)
    dirs := filepath.Dir(dest)
    if dirs != "." {
        if err := os.MkdirAll(dirs, 0755); err != nil {
            return fmt.Errorf("failed to create directory: %s, %v", dirs, err)
@@ -25,7 +25,7 @@ func TestCopyFlat(t *testing.T) {
    }

    // mock a cheatsheet struct
    sheet, err := New("foo", src.Name(), []string{}, false)
    sheet, err := New("foo", "community", src.Name(), []string{}, false)
    if err != nil {
        t.Errorf("failed to init cheatsheet: %v", err)
    }

@@ -72,7 +72,13 @@ func TestCopyDeep(t *testing.T) {
    }

    // mock a cheatsheet struct
    sheet, err := New("/cheat-tests/alpha/bravo/foo", src.Name(), []string{}, false)
    sheet, err := New(
        "/cheat-tests/alpha/bravo/foo",
        "community",
        src.Name(),
        []string{},
        false,
    )
    if err != nil {
        t.Errorf("failed to init cheatsheet: %v", err)
    }
@@ -10,17 +10,19 @@ import (

// Sheet encapsulates sheet information
type Sheet struct {
    Title    string
    Path     string
    Text     string
    Tags     []string
    Syntax   string
    ReadOnly bool
    Title     string
    CheatPath string
    Path      string
    Text      string
    Tags      []string
    Syntax    string
    ReadOnly  bool
}

// New initializes a new Sheet
func New(
    title string,
    cheatpath string,
    path string,
    tags []string,
    readOnly bool,

@@ -46,11 +48,12 @@ func New(

    // initialize and return a sheet
    return Sheet{
        Title:    title,
        Path:     path,
        Text:     text + "\n",
        Tags:     tags,
        Syntax:   fm.Syntax,
        ReadOnly: readOnly,
        Title:     title,
        CheatPath: cheatpath,
        Path:      path,
        Text:      text + "\n",
        Tags:      tags,
        Syntax:    fm.Syntax,
        ReadOnly:  readOnly,
    }, nil
}
@@ -13,6 +13,7 @@ func TestSheetSuccess(t *testing.T) {
    // initialize a sheet
    sheet, err := New(
        "foo",
        "community",
        mock.Path("sheet/foo"),
        []string{"alpha", "bravo"},
        false,

@@ -61,6 +62,7 @@ func TestSheetFailure(t *testing.T) {
    // initialize a sheet
    _, err := New(
        "foo",
        "community",
        mock.Path("/does-not-exist"),
        []string{"alpha", "bravo"},
        false,

@@ -69,3 +71,20 @@
        t.Errorf("failed to return an error on unreadable sheet")
    }
}

// TestSheetFrontMatterFailure asserts that an error is returned if the sheet's
// frontmatter cannot be parsed.
func TestSheetFrontMatterFailure(t *testing.T) {

    // initialize a sheet
    _, err := New(
        "foo",
        "community",
        mock.Path("sheet/bad-fm"),
        []string{"alpha", "bravo"},
        false,
    )
    if err == nil {
        t.Errorf("failed to return an error on malformed front-matter")
    }
}
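As the struct and test changes above show, `sheet.New` now takes the owning cheatpath name as its second argument and records it in the new `CheatPath` field. A hedged usage sketch follows; the `main` function is hypothetical, the front-matter layout is copied from the mock cheatsheets later in this diff, and the field names come from the struct above:

package main

import (
    "fmt"
    "io/ioutil"
    "os"

    "github.com/cheat/cheat/internal/sheet"
)

func main() {
    // write a throwaway cheatsheet with front-matter, as the tests above do
    tmp, err := ioutil.TempFile("", "cheat-example")
    if err != nil {
        panic(err)
    }
    defer os.Remove(tmp.Name())
    tmp.WriteString("---\nsyntax: sh\ntags: [ demo ]\n---\ntar -czf archive.tgz dir/\n")
    tmp.Close()

    // the second argument is the new CheatPath name ("community", "personal", ...)
    s, err := sheet.New("tar", "personal", tmp.Name(), []string{"demo"}, false)
    if err != nil {
        panic(err)
    }
    fmt.Println(s.Title, s.CheatPath, s.Syntax)
}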
@@ -59,7 +59,13 @@ func Load(cheatpaths []cp.Cheatpath) ([]map[string]sheet.Sheet, error) {
    }

    // parse the cheatsheet file into a `sheet` struct
    s, err := sheet.New(title, path, cheatpath.Tags, cheatpath.ReadOnly)
    s, err := sheet.New(
        title,
        cheatpath.Name,
        path,
        cheatpath.Tags,
        cheatpath.ReadOnly,
    )
    if err != nil {
        return fmt.Errorf(
            "failed to load sheet: %s, path: %s, err: %v",
@@ -1,3 +1,62 @@
package sheets

// TODO
import (
    "path"
    "testing"

    "github.com/cheat/cheat/internal/cheatpath"
    "github.com/cheat/cheat/internal/mock"
)

// TestLoad asserts that sheets on valid cheatpaths can be loaded successfully
func TestLoad(t *testing.T) {

    // mock cheatpaths
    cheatpaths := []cheatpath.Cheatpath{
        {
            Name:     "community",
            Path:     path.Join(mock.Path("cheatsheets"), "community"),
            ReadOnly: true,
        },
        {
            Name:     "personal",
            Path:     path.Join(mock.Path("cheatsheets"), "personal"),
            ReadOnly: false,
        },
    }

    // load cheatsheets
    sheets, err := Load(cheatpaths)
    if err != nil {
        t.Errorf("failed to load cheatsheets: %v", err)
    }

    // assert that the correct number of sheets loaded
    // (sheet load details are tested in `sheet_test.go`)
    want := 4
    if len(sheets) != want {
        t.Errorf(
            "failed to load correct number of cheatsheets: want: %d, got: %d",
            want,
            len(sheets),
        )
    }
}

// TestLoadBadPath asserts that an error is returned if a cheatpath is invalid
func TestLoadBadPath(t *testing.T) {

    // mock a bad cheatpath
    cheatpaths := []cheatpath.Cheatpath{
        {
            Name:     "badpath",
            Path:     "/cheat/test/path/does/not/exist",
            ReadOnly: true,
        },
    }

    // attempt to load the cheatpath
    if _, err := Load(cheatpaths); err == nil {
        t.Errorf("failed to reject invalid cheatpath")
    }
}
@@ -9,7 +9,7 @@ import (
    "github.com/cheat/cheat/internal/sheet"
)

// TestTags asserts that cheetsheet tags are properly returned
// TestTags asserts that cheatsheet tags are properly returned
func TestTags(t *testing.T) {

    // mock cheatsheets available on multiple cheatpaths
mocks/cheatsheets/community/.hiddenfile (Normal file, 0 lines)

mocks/cheatsheets/community/bar (Normal file, 4 lines)
@@ -0,0 +1,4 @@
---
tags: [ community ]
---
This is the bar cheatsheet.

mocks/cheatsheets/community/foo (Normal file, 4 lines)
@@ -0,0 +1,4 @@
---
tags: [ community ]
---
This is the foo cheatsheet.

mocks/cheatsheets/personal/bat (Normal file, 4 lines)
@@ -0,0 +1,4 @@
---
tags: [ personal ]
---
This is the bat cheatsheet.

mocks/cheatsheets/personal/baz (Normal file, 4 lines)
@@ -0,0 +1,4 @@
---
tags: [ personal ]
---
This is the baz cheatsheet.

mocks/sheet/bad-fm (Normal file, 4 lines)
@@ -0,0 +1,4 @@
---
syntax: sh

This is malformed frontmatter.
@@ -2,12 +2,18 @@

local cheats taglist pathlist

_cheat_complete_cheatsheets()
_cheat_complete_personal_cheatsheets()
{
    cheats=("${(f)$(cheat -l -t personal | tail -n +2 | cut -d' ' -f1)}")
    _describe -t cheats 'cheats' cheats
}

_cheat_complete_full_cheatsheets()
{
    cheats=("${(f)$(cheat -l | tail -n +2 | cut -d' ' -f1)}")
    _describe -t cheats 'cheats' cheats
}

_cheat_complete_tags()
{
    taglist=("${(f)$(cheat -T)}")

@@ -26,7 +32,7 @@ _cheat() {
    '(--init)--init[Write a default config file to stdout]: :->none' \
    '(-c --colorize)'{-c,--colorize}'[Colorize output]: :->none' \
    '(-d --directories)'{-d,--directories}'[List cheatsheet directories]: :->none' \
    '(-e --edit)'{-e,--edit}'[Edit <sheet>]: :->full' \
    '(-e --edit)'{-e,--edit}'[Edit <sheet>]: :->personal' \
    '(-l --list)'{-l,--list}'[List cheatsheets]: :->full' \
    '(-p --path)'{-p,--path}'[Return only sheets found on path <name>]: :->pathlist' \
    '(-r --regex)'{-r,--regex}'[Treat search <phrase> as a regex]: :->none' \

@@ -34,13 +40,16 @@ _cheat() {
    '(-t --tag)'{-t,--tag}'[Return only sheets matching <tag>]: :->taglist' \
    '(-T --tags)'{-T,--tags}'[List all tags in use]: :->none' \
    '(-v --version)'{-v,--version}'[Print the version number]: :->none' \
    '(--rm)--rm[Remove (delete) <sheet>]: :->full' \
    '(--rm)--rm[Remove (delete) <sheet>]: :->personal'

    case $state in
        (none)
            ;;
        (full)
            _cheat_complete_cheatsheets
            _cheat_complete_full_cheatsheets
            ;;
        (personal)
            _cheat_complete_personal_cheatsheets
            ;;
        (taglist)
            _cheat_complete_tags

@@ -49,9 +58,8 @@ _cheat() {
            _cheat_complete_paths
            ;;
        (*)
            _cheat_complete_cheatsheets
            ;;
    esac
}

_cheat
compdef _cheat cheat
scripts/git/cheatsheets (Executable file, 46 lines)
@@ -0,0 +1,46 @@
#!/bin/sh -e

pull() {
    for d in `cheat -d | awk '{print $2}'`;
    do
        echo "Update $d"
        cd "$d"
        [ -d ".git" ] && git pull || :
    done

    echo
    echo "Finished update"
}

push() {
    for d in `cheat -d | grep -v "community" | awk '{print $2}'`;
    do
        cd "$d"
        if [ -d ".git" ]
        then
            echo "Push modifications $d"
            files=$(git ls-files -mo | tr '\n' ' ')
            git add -A && git commit -m "Edited files: $files" && git push || :
        else
            echo "$(pwd) is not a git managed folder"
            echo "First connect this to your personal git repository"
        fi
    done

    echo
    echo "Finished push operation"
}


if [ "$1" = "pull" ]; then
    pull
elif [ "$1" = "push" ]; then
    push
else
    echo "Usage:
    # pull changes
    cheatsheets pull

    # push changes
    cheatsheets push"
fi
21
vendor/github.com/alecthomas/chroma/.golangci.yml
generated
vendored
21
vendor/github.com/alecthomas/chroma/.golangci.yml
generated
vendored
@ -20,6 +20,22 @@ linters:
|
||||
- wsl
|
||||
- gomnd
|
||||
- gocognit
|
||||
- goerr113
|
||||
- nolintlint
|
||||
- testpackage
|
||||
- godot
|
||||
- nestif
|
||||
- paralleltest
|
||||
- nlreturn
|
||||
- cyclop
|
||||
- exhaustivestruct
|
||||
- gci
|
||||
- gofumpt
|
||||
- errorlint
|
||||
- exhaustive
|
||||
- ifshort
|
||||
- wrapcheck
|
||||
- stylecheck
|
||||
|
||||
linters-settings:
|
||||
govet:
|
||||
@ -31,6 +47,11 @@ linters-settings:
|
||||
goconst:
|
||||
min-len: 8
|
||||
min-occurrences: 3
|
||||
forbidigo:
|
||||
forbid:
|
||||
- (Must)?NewLexer
|
||||
exclude_godoc_examples: false
|
||||
|
||||
|
||||
issues:
|
||||
max-per-linter: 0
|
||||
|
36
vendor/github.com/alecthomas/chroma/.goreleaser.yml
generated
vendored
36
vendor/github.com/alecthomas/chroma/.goreleaser.yml
generated
vendored
@ -3,28 +3,34 @@ release:
|
||||
github:
|
||||
owner: alecthomas
|
||||
name: chroma
|
||||
brew:
|
||||
install: bin.install "chroma"
|
||||
brews:
|
||||
-
|
||||
install: bin.install "chroma"
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
builds:
|
||||
- goos:
|
||||
- linux
|
||||
- darwin
|
||||
- windows
|
||||
- linux
|
||||
- darwin
|
||||
- windows
|
||||
goarch:
|
||||
- amd64
|
||||
- "386"
|
||||
- arm64
|
||||
- amd64
|
||||
- "386"
|
||||
goarm:
|
||||
- "6"
|
||||
main: ./cmd/chroma/main.go
|
||||
- "6"
|
||||
dir: ./cmd/chroma
|
||||
main: .
|
||||
ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
|
||||
binary: chroma
|
||||
archive:
|
||||
format: tar.gz
|
||||
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
|
||||
archives:
|
||||
-
|
||||
format: tar.gz
|
||||
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
|
||||
.Arm }}{{ end }}'
|
||||
files:
|
||||
- COPYING
|
||||
- README*
|
||||
files:
|
||||
- COPYING
|
||||
- README*
|
||||
snapshot:
|
||||
name_template: SNAPSHOT-{{ .Commit }}
|
||||
checksum:
|
||||
|
12
vendor/github.com/alecthomas/chroma/.travis.yml
generated
vendored
12
vendor/github.com/alecthomas/chroma/.travis.yml
generated
vendored
@ -1,12 +0,0 @@
|
||||
sudo: false
|
||||
language: go
|
||||
go:
|
||||
- "1.13.x"
|
||||
script:
|
||||
- go test -v ./...
|
||||
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2
|
||||
- ./bin/golangci-lint run
|
||||
- git clean -fdx .
|
||||
after_success:
|
||||
curl -sL https://git.io/goreleaser | bash && goreleaser
|
||||
|
6
vendor/github.com/alecthomas/chroma/Makefile
generated
vendored
6
vendor/github.com/alecthomas/chroma/Makefile
generated
vendored
@ -1,5 +1,7 @@
|
||||
.PHONY: chromad upload all
|
||||
|
||||
VERSION ?= $(shell git describe --tags --dirty --always)
|
||||
|
||||
all: README.md tokentype_string.go
|
||||
|
||||
README.md: lexers/*/*.go
|
||||
@ -9,10 +11,8 @@ tokentype_string.go: types.go
|
||||
go generate
|
||||
|
||||
chromad:
|
||||
(cd ./cmd/chromad && go get github.com/GeertJohan/go.rice/rice@master && go install github.com/GeertJohan/go.rice/rice)
|
||||
rm -f chromad
|
||||
(export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -o ../../chromad .)
|
||||
rice append -i ./cmd/chromad --exec=./chromad
|
||||
(export CGOENABLED=0 GOOS=linux GOARCH=amd64; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)
|
||||
|
||||
upload: chromad
|
||||
scp chromad root@swapoff.org: && \
|
||||
|
60
vendor/github.com/alecthomas/chroma/README.md
generated
vendored
60
vendor/github.com/alecthomas/chroma/README.md
generated
vendored
@ -1,4 +1,5 @@
|
||||
# Chroma — A general purpose syntax highlighter in pure Go [](https://godoc.org/github.com/alecthomas/chroma) [](https://travis-ci.org/alecthomas/chroma) [](https://gitter.im/alecthomas/Lobby)
|
||||
# Chroma — A general purpose syntax highlighter in pure Go
|
||||
[](https://godoc.org/github.com/alecthomas/chroma) [](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [](https://invite.slack.golangbridge.org/)
|
||||
|
||||
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
|
||||
|
||||
@ -36,29 +37,30 @@ translators for Pygments lexers and styles.
|
||||
Prefix | Language
|
||||
:----: | --------
|
||||
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
|
||||
B | Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, BNF, Brainfuck
|
||||
C | C, C#, C++, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
|
||||
D | D, Dart, Diff, Django/Jinja, Docker, DTD
|
||||
B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, Brainfuck
|
||||
C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
|
||||
D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan
|
||||
E | EBNF, Elixir, Elm, EmacsLisp, Erlang
|
||||
F | Factor, Fish, Forth, Fortran, FSharp
|
||||
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
|
||||
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HTML, HTTP, Hy
|
||||
I | Idris, INI, Io
|
||||
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy
|
||||
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy
|
||||
I | Idris, Igor, INI, Io
|
||||
J | J, Java, JavaScript, JSON, Julia, Jungle
|
||||
K | Kotlin
|
||||
L | Lighttpd configuration file, LLVM, Lua
|
||||
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
|
||||
N | NASM, Newspeak, Nginx configuration file, Nim, Nix
|
||||
O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
|
||||
P | PacmanConf, Perl, PHP, Pig, PkgConfig, PL/pgSQL, plaintext, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3
|
||||
O | Objective-C, OCaml, Octave, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode
|
||||
P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python 2, Python
|
||||
Q | QBasic
|
||||
R | R, Racket, Ragel, react, reg, reStructuredText, Rexx, Ruby, Rust
|
||||
S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, SML, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, SYSTEMD, systemverilog
|
||||
R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
|
||||
S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Svelte, Swift, SYSTEMD, systemverilog
|
||||
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
|
||||
V | VB.net, verilog, VHDL, VimL, vue
|
||||
W | WDTE
|
||||
X | XML, Xorg
|
||||
Y | YAML
|
||||
Y | YAML, YANG
|
||||
Z | Zig
|
||||
|
||||
|
||||
_I will attempt to keep this section up to date, but an authoritative list can be
|
||||
@ -183,7 +185,7 @@ following constructor options:
|
||||
- `ClassPrefix(prefix)` - prefix each generated CSS class.
|
||||
- `TabWidth(width)` - Set the rendered tab width, in characters.
|
||||
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
|
||||
- `LinkableLineNumbers()` - Make the line numbers linkable.
|
||||
- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves.
|
||||
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
|
||||
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
|
||||
|
||||
@ -209,13 +211,13 @@ using the included Python 3 script `pygments2chroma.py`. I use something like
|
||||
the following:
|
||||
|
||||
```sh
|
||||
python3 ~/Projects/chroma/_tools/pygments2chroma.py \
|
||||
python3 _tools/pygments2chroma.py \
|
||||
pygments.lexers.jvm.KotlinLexer \
|
||||
> ~/Projects/chroma/lexers/kotlin.go \
|
||||
&& gofmt -s -w ~/Projects/chroma/lexers/*.go
|
||||
> lexers/k/kotlin.go \
|
||||
&& gofmt -s -w lexers/k/kotlin.go
|
||||
```
|
||||
|
||||
See notes in [pygments-lexers.go](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
|
||||
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
|
||||
for a list of lexers, and notes on some of the issues importing them.
|
||||
|
||||
<a id="markdown-formatters" name="formatters"></a>
|
||||
@ -248,18 +250,34 @@ For a quick overview of the available styles and how they look, check out the [C
|
||||
<a id="markdown-command-line-interface" name="command-line-interface"></a>
|
||||
## Command-line interface
|
||||
|
||||
A command-line interface to Chroma is included. It can be installed with:
|
||||
A command-line interface to Chroma is included.
|
||||
|
||||
```sh
|
||||
go get -u github.com/alecthomas/chroma/cmd/chroma
|
||||
Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases).
|
||||
|
||||
The CLI can be used as a preprocessor to colorise output of `less(1)`,
|
||||
see documentation for the `LESSOPEN` environment variable.
|
||||
|
||||
The `--fail` flag can be used to suppress output and return with exit status
|
||||
1 to facilitate falling back to some other preprocessor in case chroma
|
||||
does not resolve a specific lexer to use for the given file. For example:
|
||||
|
||||
```shell
|
||||
export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"'
|
||||
```
|
||||
|
||||
Replace `cat` with your favourite fallback preprocessor.
|
||||
|
||||
When invoked as `.lessfilter`, the `--fail` flag is automatically turned
|
||||
on under the hood for easy integration with [lesspipe shipping with
|
||||
Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
|
||||
for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`.
|
||||
|
||||
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
|
||||
## What's missing compared to Pygments?
|
||||
|
||||
- Quite a few lexers, for various reasons (pull-requests welcome):
|
||||
- Pygments lexers for complex languages often include custom code to
|
||||
handle certain aspects, such as Perl6's ability to nest code inside
|
||||
handle certain aspects, such as Raku's ability to nest code inside
|
||||
regular expressions. These require time and effort to convert.
|
||||
- I mostly only converted languages I had heard of, to reduce the porting cost.
|
||||
- Some more esoteric features of Pygments are omitted for simplicity.
|
||||
|
105
vendor/github.com/alecthomas/chroma/formatters/html/html.go
generated
vendored
105
vendor/github.com/alecthomas/chroma/formatters/html/html.go
generated
vendored
@ -22,6 +22,9 @@ func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix =
|
||||
// WithClasses emits HTML using CSS classes, rather than inline styles.
|
||||
func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }
|
||||
|
||||
// WithAllClasses disables an optimisation that omits redundant CSS classes.
|
||||
func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }
|
||||
|
||||
// TabWidth sets the number of characters for a tab. Defaults to 8.
|
||||
func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
|
||||
|
||||
@ -43,6 +46,13 @@ func WithPreWrapper(wrapper PreWrapper) Option {
|
||||
}
|
||||
}
|
||||
|
||||
// WrapLongLines wraps long lines.
|
||||
func WrapLongLines(b bool) Option {
|
||||
return func(f *Formatter) {
|
||||
f.wrapLongLines = b
|
||||
}
|
||||
}
|
||||
|
||||
// WithLineNumbers formats output with line numbers.
|
||||
func WithLineNumbers(b bool) Option {
|
||||
return func(f *Formatter) {
|
||||
@ -128,10 +138,18 @@ var (
|
||||
}
|
||||
defaultPreWrapper = preWrapper{
|
||||
start: func(code bool, styleAttr string) string {
|
||||
return fmt.Sprintf("<pre%s>", styleAttr)
|
||||
if code {
|
||||
return fmt.Sprintf(`<pre tabindex="0"%s><code>`, styleAttr)
|
||||
}
|
||||
|
||||
return fmt.Sprintf(`<pre tabindex="0"%s>`, styleAttr)
|
||||
},
|
||||
end: func(code bool) string {
|
||||
return "</pre>"
|
||||
if code {
|
||||
return `</code></pre>`
|
||||
}
|
||||
|
||||
return `</pre>`
|
||||
},
|
||||
}
|
||||
)
|
||||
@ -141,8 +159,10 @@ type Formatter struct {
|
||||
standalone bool
|
||||
prefix string
|
||||
Classes bool // Exported field to detect when classes are being used
|
||||
allClasses bool
|
||||
preWrapper PreWrapper
|
||||
tabWidth int
|
||||
wrapLongLines bool
|
||||
lineNumbers bool
|
||||
lineNumbersInTable bool
|
||||
linkableLineNumbers bool
|
||||
@ -188,15 +208,15 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
|
||||
wrapInTable := f.lineNumbers && f.lineNumbersInTable
|
||||
|
||||
lines := chroma.SplitTokensIntoLines(tokens)
|
||||
lineDigits := len(fmt.Sprintf("%d", len(lines)))
|
||||
lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
|
||||
highlightIndex := 0
|
||||
|
||||
if wrapInTable {
|
||||
// List line numbers in its own <td>
|
||||
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.Background))
|
||||
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
|
||||
fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
|
||||
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
|
||||
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.Background)))
|
||||
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
|
||||
for index := range lines {
|
||||
line := f.baseLineNumber + index
|
||||
highlight, next := f.shouldHighlight(highlightIndex, line)
|
||||
@ -207,7 +227,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
|
||||
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, "<span%s%s>%*d\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), lineDigits, line)
|
||||
fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
|
||||
|
||||
if highlight {
|
||||
fmt.Fprintf(w, "</span>")
|
||||
@ -218,7 +238,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
|
||||
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.Background)))
|
||||
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
|
||||
|
||||
highlightIndex = 0
|
||||
for index, tokens := range lines {
|
||||
@ -228,14 +248,28 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
|
||||
if next {
|
||||
highlightIndex++
|
||||
}
|
||||
|
||||
// Start of Line
|
||||
fmt.Fprint(w, `<span`)
|
||||
if highlight {
|
||||
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
|
||||
// Line + LineHighlight
|
||||
if f.Classes {
|
||||
fmt.Fprintf(w, ` class="%s %s"`, f.class(chroma.Line), f.class(chroma.LineHighlight))
|
||||
} else {
|
||||
fmt.Fprintf(w, ` style="%s %s"`, css[chroma.Line], css[chroma.LineHighlight])
|
||||
}
|
||||
fmt.Fprint(w, `>`)
|
||||
} else {
|
||||
fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line))
|
||||
}
|
||||
|
||||
// Line number
|
||||
if f.lineNumbers && !wrapInTable {
|
||||
fmt.Fprintf(w, "<span%s%s>%*d</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), lineDigits, line)
|
||||
fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, `<span%s>`, f.styleAttr(css, chroma.CodeLine))
|
||||
|
||||
for _, token := range tokens {
|
||||
html := html.EscapeString(token.String())
|
||||
attr := f.styleAttr(css, token.Type)
|
||||
@ -244,9 +278,10 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
|
||||
}
|
||||
fmt.Fprint(w, html)
|
||||
}
|
||||
if highlight {
|
||||
fmt.Fprintf(w, "</span>")
|
||||
}
|
||||
|
||||
fmt.Fprint(w, `</span>`) // End of CodeLine
|
||||
|
||||
fmt.Fprint(w, `</span>`) // End of Line
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, f.preWrapper.End(true))
|
||||
@ -268,7 +303,19 @@ func (f *Formatter) lineIDAttribute(line int) string {
|
||||
if !f.linkableLineNumbers {
|
||||
return ""
|
||||
}
|
||||
return fmt.Sprintf(" id=\"%s%d\"", f.lineNumbersIDPrefix, line)
|
||||
return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
|
||||
}
|
||||
|
||||
func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
|
||||
title := fmt.Sprintf("%*d", lineDigits, line)
|
||||
if !f.linkableLineNumbers {
|
||||
return title
|
||||
}
|
||||
return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title)
|
||||
}
|
||||
|
||||
func (f *Formatter) lineID(line int) string {
|
||||
return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
|
||||
}
|
||||
|
||||
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {
|
||||
@ -335,7 +382,11 @@ func (f *Formatter) tabWidthStyle() string {
|
||||
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
|
||||
css := f.styleToCSS(style)
|
||||
// Special-case background as it is mapped to the outer ".chroma" class.
|
||||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
|
||||
if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
|
||||
return err
|
||||
}
|
||||
// Special-case PreWrapper as it is the ".chroma" class.
|
||||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil {
|
||||
return err
|
||||
}
|
||||
// Special-case code column of table to expand width.
|
||||
@ -359,11 +410,16 @@ func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
|
||||
sort.Ints(tts)
|
||||
for _, ti := range tts {
|
||||
tt := chroma.TokenType(ti)
|
||||
if tt == chroma.Background {
|
||||
switch tt {
|
||||
case chroma.Background, chroma.PreWrapper:
|
||||
continue
|
||||
}
|
||||
class := f.class(tt)
|
||||
if class == "" {
|
||||
continue
|
||||
}
|
||||
styles := css[tt]
|
||||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, f.class(tt), styles); err != nil {
|
||||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@ -379,18 +435,27 @@ func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string
|
||||
if t != chroma.Background {
|
||||
entry = entry.Sub(bg)
|
||||
}
|
||||
if entry.IsZero() {
|
||||
if !f.allClasses && entry.IsZero() {
|
||||
continue
|
||||
}
|
||||
classes[t] = StyleEntryToCSS(entry)
|
||||
}
|
||||
classes[chroma.Background] += f.tabWidthStyle()
|
||||
lineNumbersStyle := "margin-right: 0.4em; padding: 0 0.4em 0 0.4em;"
|
||||
classes[chroma.PreWrapper] += classes[chroma.Background] + `;`
|
||||
// Make PreWrapper a grid to show highlight style with full width.
|
||||
if len(f.highlightRanges) > 0 {
|
||||
classes[chroma.PreWrapper] += `display: grid;`
|
||||
}
|
||||
// Make PreWrapper wrap long lines.
|
||||
if f.wrapLongLines {
|
||||
classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;`
|
||||
}
|
||||
lineNumbersStyle := `white-space: pre; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
|
||||
// All rules begin with default rules followed by user provided rules
|
||||
classes[chroma.Line] = `display: flex;` + classes[chroma.Line]
|
||||
classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
|
||||
classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable]
|
||||
classes[chroma.LineHighlight] = "display: block; width: 100%;" + classes[chroma.LineHighlight]
|
||||
classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0; width: auto; overflow: auto; display: block;" + classes[chroma.LineTable]
|
||||
classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable]
|
||||
classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
|
||||
return classes
|
||||
}
|
||||
|
4
vendor/github.com/alecthomas/chroma/formatters/svg/svg.go
generated
vendored
4
vendor/github.com/alecthomas/chroma/formatters/svg/svg.go
generated
vendored
@ -120,7 +120,7 @@ func maxLineWidth(lines [][]chroma.Token) int {
|
||||
for _, tokens := range lines {
|
||||
length := 0
|
||||
for _, token := range tokens {
|
||||
length += len(strings.Replace(token.String(), ` `, " ", -1))
|
||||
length += len(strings.ReplaceAll(token.String(), ` `, " "))
|
||||
}
|
||||
if length > maxWidth {
|
||||
maxWidth = length
|
||||
@ -136,7 +136,7 @@ func (f *Formatter) writeTokenBackgrounds(w io.Writer, lines [][]chroma.Token, s
|
||||
for index, tokens := range lines {
|
||||
lineLength := 0
|
||||
for _, token := range tokens {
|
||||
length := len(strings.Replace(token.String(), ` `, " ", -1))
|
||||
length := len(strings.ReplaceAll(token.String(), ` `, " "))
|
||||
tokenBackground := style.Get(token.Type).Background
|
||||
if tokenBackground.IsSet() && tokenBackground != style.Get(chroma.Background).Background {
|
||||
fmt.Fprintf(w, "<rect id=\"%s\" x=\"%dch\" y=\"%fem\" width=\"%dch\" height=\"1.2em\" fill=\"%s\" />\n", escapeString(token.String()), lineLength, 1.2*float64(index)+0.25, length, style.Get(token.Type).Background.String())
|
||||
|
32
vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go
generated
vendored
32
vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go
generated
vendored
@ -17,6 +17,20 @@ var c = chroma.MustParseColour
|
||||
|
||||
var ttyTables = map[int]*ttyTable{
|
||||
8: {
|
||||
foreground: map[chroma.Colour]string{
|
||||
c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
|
||||
c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
|
||||
c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m",
|
||||
c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m",
|
||||
},
|
||||
background: map[chroma.Colour]string{
|
||||
c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
|
||||
c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
|
||||
c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m",
|
||||
c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m",
|
||||
},
|
||||
},
|
||||
16: {
|
||||
foreground: map[chroma.Colour]string{
|
||||
c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
|
||||
c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
|
||||
@ -227,15 +241,11 @@ type indexedTTYFormatter struct {
|
||||
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
|
||||
theme := styleToEscapeSequence(c.table, style)
|
||||
for token := it(); token != chroma.EOF; token = it() {
|
||||
// TODO: Cache token lookups?
|
||||
clr, ok := theme[token.Type]
|
||||
if !ok {
|
||||
clr, ok = theme[token.Type.SubCategory()]
|
||||
if !ok {
|
||||
clr = theme[token.Type.Category()]
|
||||
// if !ok {
|
||||
// clr = theme[chroma.InheritStyle]
|
||||
// }
|
||||
}
|
||||
}
|
||||
if clr != "" {
|
||||
@ -249,10 +259,22 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
|
||||
return nil
|
||||
}
|
||||
|
||||
// TTY is an 8-colour terminal formatter.
|
||||
//
|
||||
// The Lab colour space is used to map RGB values to the most appropriate index colour.
|
||||
var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
|
||||
|
||||
// TTY8 is an 8-colour terminal formatter.
|
||||
//
|
||||
// The Lab colour space is used to map RGB values to the most appropriate index colour.
|
||||
var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
|
||||
var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]})
|
||||
|
||||
// TTY16 is a 16-colour terminal formatter.
|
||||
//
|
||||
// It uses \033[3xm for normal colours and \033[90Xm for bright colours.
|
||||
//
|
||||
// The Lab colour space is used to map RGB values to the most appropriate index colour.
|
||||
var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]})
|
||||
|
||||
// TTY256 is a 256-colour terminal formatter.
|
||||
//
|
||||
|
22
vendor/github.com/alecthomas/chroma/go.mod
generated
vendored
22
vendor/github.com/alecthomas/chroma/go.mod
generated
vendored
@ -1,19 +1,9 @@
|
||||
module github.com/alecthomas/chroma
|
||||
|
||||
require (
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
|
||||
github.com/dlclark/regexp2 v1.1.6
|
||||
github.com/mattn/go-colorable v0.0.9
|
||||
github.com/mattn/go-isatty v0.0.4
|
||||
github.com/sergi/go-diff v1.0.0 // indirect
|
||||
github.com/stretchr/testify v1.3.0 // indirect
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 // indirect
|
||||
)
|
||||
|
||||
replace github.com/GeertJohan/go.rice => github.com/alecthomas/go.rice v1.0.1-0.20190719113735-961b99d742e7
|
||||
|
||||
go 1.13
|
||||
|
||||
require (
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dlclark/regexp2 v1.4.0
|
||||
github.com/stretchr/testify v1.7.0
|
||||
)
|
||||
|
35
vendor/github.com/alecthomas/chroma/go.sum
generated
vendored
35
vendor/github.com/alecthomas/chroma/go.sum
generated
vendored
@ -1,33 +1,14 @@
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
|
||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
|
||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae h1:C4Q9m+oXOxcSWwYk9XzzafY2xAVAaeubZbUHJkw3PlY=
|
||||
github.com/alecthomas/kong v0.2.1-0.20190708041108-0548c6b1afae/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
|
||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
|
||||
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
|
||||
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
|
||||
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
|
||||
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
|
||||
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
|
||||
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU=
|
||||
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
2
vendor/github.com/alecthomas/chroma/iterator.go
generated
vendored
2
vendor/github.com/alecthomas/chroma/iterator.go
generated
vendored
@ -4,7 +4,7 @@ import "strings"
|
||||
|
||||
// An Iterator across tokens.
|
||||
//
|
||||
// nil will be returned at the end of the Token stream.
|
||||
// EOF will be returned at the end of the Token stream.
|
||||
//
|
||||
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
|
||||
type Iterator func() Token
|
||||
|
16
vendor/github.com/alecthomas/chroma/lexer.go
generated
vendored
16
vendor/github.com/alecthomas/chroma/lexer.go
generated
vendored
@ -2,11 +2,13 @@ package chroma
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
defaultOptions = &TokeniseOptions{
|
||||
State: "root",
|
||||
State: "root",
|
||||
EnsureLF: true,
|
||||
}
|
||||
)
|
||||
|
||||
@ -80,6 +82,10 @@ type TokeniseOptions struct {
|
||||
State string
|
||||
// Nested tokenisation.
|
||||
Nested bool
|
||||
|
||||
// If true, all EOLs are converted into LF
|
||||
// by replacing CRLF and CR
|
||||
EnsureLF bool
|
||||
}
|
||||
|
||||
// A Lexer for tokenising source code.
|
||||
@ -93,9 +99,11 @@ type Lexer interface {
|
||||
// Lexers is a slice of lexers sortable by name.
|
||||
type Lexers []Lexer
|
||||
|
||||
func (l Lexers) Len() int { return len(l) }
|
||||
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
|
||||
func (l Lexers) Less(i, j int) bool { return l[i].Config().Name < l[j].Config().Name }
|
||||
func (l Lexers) Len() int { return len(l) }
|
||||
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
|
||||
func (l Lexers) Less(i, j int) bool {
|
||||
return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name)
|
||||
}
|
||||
|
||||
// PrioritisedLexers is a slice of lexers sortable by priority.
|
||||
type PrioritisedLexers []Lexer
|
||||
|
3
vendor/github.com/alecthomas/chroma/lexers/README.md
generated
vendored
3
vendor/github.com/alecthomas/chroma/lexers/README.md
generated
vendored
@ -3,6 +3,9 @@
|
||||
The tests in this directory feed a known input `testdata/<name>.actual` into the parser for `<name>` and check
|
||||
that its output matches `<name>.exported`.
|
||||
|
||||
It is also possible to perform several tests on a same parser `<name>`, by placing know inputs `*.actual` into a
|
||||
directory `testdata/<name>/`.
|
||||
|
||||
## Running the tests
|
||||
|
||||
Run the tests as normal:
|
||||
|
12
vendor/github.com/alecthomas/chroma/lexers/a/abap.go
generated
vendored
12
vendor/github.com/alecthomas/chroma/lexers/a/abap.go
generated
vendored
@ -6,7 +6,7 @@ import (
|
||||
)
|
||||
|
||||
// ABAP lexer.
|
||||
var Abap = internal.Register(MustNewLexer(
|
||||
var Abap = internal.Register(MustNewLazyLexer(
|
||||
&Config{
|
||||
Name: "ABAP",
|
||||
Aliases: []string{"abap"},
|
||||
@ -14,7 +14,11 @@ var Abap = internal.Register(MustNewLexer(
|
||||
MimeTypes: []string{"text/x-abap"},
|
||||
CaseInsensitive: true,
|
||||
},
|
||||
Rules{
|
||||
abapRules,
|
||||
))
|
||||
|
||||
func abapRules() Rules {
|
||||
return Rules{
|
||||
"common": {
|
||||
{`\s+`, Text, nil},
|
||||
{`^\*.*$`, CommentSingle, nil},
|
||||
@ -52,5 +56,5 @@ var Abap = internal.Register(MustNewLexer(
|
||||
{`[/;:()\[\],.]`, Punctuation, nil},
|
||||
{`(!)(\w+)`, ByGroups(Operator, Name), nil},
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|
12
vendor/github.com/alecthomas/chroma/lexers/a/abnf.go
generated
vendored
12
vendor/github.com/alecthomas/chroma/lexers/a/abnf.go
generated
vendored
@ -6,14 +6,18 @@ import (
|
||||
)
|
||||
|
||||
// Abnf lexer.
|
||||
var Abnf = internal.Register(MustNewLexer(
|
||||
var Abnf = internal.Register(MustNewLazyLexer(
|
||||
&Config{
|
||||
Name: "ABNF",
|
||||
Aliases: []string{"abnf"},
|
||||
Filenames: []string{"*.abnf"},
|
||||
MimeTypes: []string{"text/x-abnf"},
|
||||
},
|
||||
Rules{
|
||||
abnfRules,
|
||||
))
|
||||
|
||||
func abnfRules() Rules {
|
||||
return Rules{
|
||||
"root": {
|
||||
{`;.*$`, CommentSingle, nil},
|
||||
{`(%[si])?"[^"]*"`, Literal, nil},
|
||||
@ -34,5 +38,5 @@ var Abnf = internal.Register(MustNewLexer(
|
||||
{`\s+`, Text, nil},
|
||||
{`.`, Text, nil},
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|
12
vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go
generated
vendored
12
vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go
generated
vendored
@ -6,7 +6,7 @@ import (
|
||||
)
|
||||
|
||||
// Actionscript lexer.
|
||||
var Actionscript = internal.Register(MustNewLexer(
|
||||
var Actionscript = internal.Register(MustNewLazyLexer(
|
||||
&Config{
|
||||
Name: "ActionScript",
|
||||
Aliases: []string{"as", "actionscript"},
|
||||
@ -15,7 +15,11 @@ var Actionscript = internal.Register(MustNewLexer(
|
||||
NotMultiline: true,
|
||||
DotAll: true,
|
||||
},
|
||||
Rules{
|
||||
actionscriptRules,
|
||||
))
|
||||
|
||||
func actionscriptRules() Rules {
|
||||
return Rules{
|
||||
"root": {
|
||||
{`\s+`, Text, nil},
|
||||
{`//.*?\n`, CommentSingle, nil},
|
||||
@ -35,5 +39,5 @@ var Actionscript = internal.Register(MustNewLexer(
|
||||
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
|
||||
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|
12
vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go
generated
vendored
12
vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go
generated
vendored
@ -6,7 +6,7 @@ import (
|
||||
)
|
||||
|
||||
// Actionscript 3 lexer.
|
||||
var Actionscript3 = internal.Register(MustNewLexer(
|
||||
var Actionscript3 = internal.Register(MustNewLazyLexer(
|
||||
&Config{
|
||||
Name: "ActionScript 3",
|
||||
Aliases: []string{"as3", "actionscript3"},
|
||||
@ -14,7 +14,11 @@ var Actionscript3 = internal.Register(MustNewLexer(
|
||||
MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"},
|
||||
DotAll: true,
|
||||
},
|
||||
Rules{
|
||||
actionscript3Rules,
|
||||
))
|
||||
|
||||
func actionscript3Rules() Rules {
|
||||
return Rules{
|
||||
"root": {
|
||||
{`\s+`, Text, nil},
|
||||
{`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")},
|
||||
@ -52,5 +56,5 @@ var Actionscript3 = internal.Register(MustNewLexer(
|
||||
{`,`, Operator, Pop(1)},
|
||||
Default(Pop(1)),
|
||||
},
|
||||
},
|
||||
))
|
||||
}
|
||||
}
|
||||
|

vendor/github.com/alecthomas/chroma/lexers/a/ada.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Ada lexer.
-var Ada = internal.Register(MustNewLexer(
+var Ada = internal.Register(MustNewLazyLexer(
&Config{
Name: "Ada",
Aliases: []string{"ada", "ada95", "ada2005"},
@@ -14,7 +14,11 @@ var Ada = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-ada"},
CaseInsensitive: true,
},
-Rules{
+adaRules,
+))
+
+func adaRules() Rules {
+return Rules{
"root": {
{`[^\S\n]+`, Text, nil},
{`--.*?\n`, CommentSingle, nil},
@@ -110,5 +114,5 @@ var Ada = internal.Register(MustNewLexer(
{`\)`, Punctuation, Pop(1)},
Include("root"),
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/a/al.go (generated, vendored, new file): 47 additions
@@ -0,0 +1,47 @@
package a

import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)

// Al lexer.
var Al = internal.Register(MustNewLazyLexer(
&Config{
Name: "AL",
Aliases: []string{"al"},
Filenames: []string{"*.al", "*.dal"},
MimeTypes: []string{"text/x-al"},
DotAll: true,
CaseInsensitive: true,
},
alRules,
))

// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage
func alRules() Rules {
return Rules{
"root": {
{`\s+`, TextWhitespace, nil},
{`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil},
{`(?s)//.*?\n`, CommentSingle, nil},
{`\"([^\"])*\"`, Text, nil},
{`'([^'])*'`, LiteralString, nil},
{`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil},
{`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil},
{`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil},
{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b`, Keyword, nil},
{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil},
{`\b([<>]=|<>|<|>)\b?`, Operator, nil},
{`\b(\-|\+|\/|\*)\b`, Operator, nil},
{`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil},
{`\b(?i:(ADD|ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
{`\s*[(\.\.)&\|]\s*`, Operator, nil},
{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil},
{`[;:,]`, Punctuation, nil},
{`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
{`\w+`, Text, nil},
{`.`, Text, nil},
},
}
}

vendor/github.com/alecthomas/chroma/lexers/a/angular2.go (generated, vendored): 12 changes
@@ -6,14 +6,18 @@ import (
)

// Angular2 lexer.
-var Angular2 = internal.Register(MustNewLexer(
+var Angular2 = internal.Register(MustNewLazyLexer(
&Config{
Name: "Angular2",
Aliases: []string{"ng2"},
Filenames: []string{},
MimeTypes: []string{},
},
-Rules{
+angular2Rules,
+))
+
+func angular2Rules() Rules {
+return Rules{
"root": {
{`[^{([*#]+`, Other, nil},
{`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")},
@@ -38,5 +42,5 @@ var Angular2 = internal.Register(MustNewLexer(
{`'.*?'`, LiteralString, Pop(1)},
{`[^\s>]+`, LiteralString, Pop(1)},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/a/antlr.go (generated, vendored): 12 changes
@@ -6,14 +6,18 @@ import (
)

// ANTLR lexer.
-var ANTLR = internal.Register(MustNewLexer(
+var ANTLR = internal.Register(MustNewLazyLexer(
&Config{
Name: "ANTLR",
Aliases: []string{"antlr"},
Filenames: []string{},
MimeTypes: []string{},
},
-Rules{
+antlrRules,
+))
+
+func antlrRules() Rules {
+return Rules{
"whitespace": {
{`\s+`, TextWhitespace, nil},
},
@@ -97,5 +101,5 @@ var ANTLR = internal.Register(MustNewLexer(
{`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil},
{`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/a/apache.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Apacheconf lexer.
-var Apacheconf = internal.Register(MustNewLexer(
+var Apacheconf = internal.Register(MustNewLazyLexer(
&Config{
Name: "ApacheConf",
Aliases: []string{"apacheconf", "aconf", "apache"},
@@ -14,7 +14,11 @@ var Apacheconf = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-apacheconf"},
CaseInsensitive: true,
},
-Rules{
+apacheconfRules,
+))
+
+func apacheconfRules() Rules {
+return Rules{
"root": {
{`\s+`, Text, nil},
{`(#.*?)$`, Comment, nil},
@@ -34,5 +38,5 @@ var Apacheconf = internal.Register(MustNewLexer(
{`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil},
{`[^\s"\\]+`, Text, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/a/apl.go (generated, vendored): 18 changes
@@ -6,14 +6,18 @@ import (
)

// Apl lexer.
-var Apl = internal.Register(MustNewLexer(
+var Apl = internal.Register(MustNewLazyLexer(
&Config{
Name: "APL",
Aliases: []string{"apl"},
Filenames: []string{"*.apl"},
MimeTypes: []string{},
},
-Rules{
+aplRules,
+))
+
+func aplRules() Rules {
+return Rules{
"root": {
{`\s+`, Text, nil},
{`[⍝#].*$`, CommentSingle, nil},
@@ -22,15 +26,15 @@ var Apl = internal.Register(MustNewLexer(
{`[⋄◇()]`, Punctuation, nil},
{`[\[\];]`, LiteralStringRegex, nil},
{`⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameFunction, nil},
-{`[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil},
+{`[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil},
{`¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?`, LiteralNumber, nil},
{`[\.\\/⌿⍀¨⍣⍨⍠⍤∘]`, NameAttribute, nil},
{`[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]`, Operator, nil},
{`[\.\\/⌿⍀¨⍣⍨⍠⍤∘⍥@⌺⌶⍢]`, NameAttribute, nil},
{`[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⍸]`, Operator, nil},
{`⍬`, NameConstant, nil},
{`[⎕⍞]`, NameVariableGlobal, nil},
{`[←→]`, KeywordDeclaration, nil},
{`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil},
{`[{}]`, KeywordType, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/a/applescript.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Applescript lexer.
-var Applescript = internal.Register(MustNewLexer(
+var Applescript = internal.Register(MustNewLazyLexer(
&Config{
Name: "AppleScript",
Aliases: []string{"applescript"},
@@ -14,7 +14,11 @@ var Applescript = internal.Register(MustNewLexer(
MimeTypes: []string{},
DotAll: true,
},
-Rules{
+applescriptRules,
+))
+
+func applescriptRules() Rules {
+return Rules{
"root": {
{`\s+`, Text, nil},
{`¬\n`, LiteralStringEscape, nil},
@@ -51,5 +55,5 @@ var Applescript = internal.Register(MustNewLexer(
{`[^*(]+`, CommentMultiline, nil},
{`[*(]`, CommentMultiline, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/a/arduino.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Arduino lexer.
-var Arduino = internal.Register(MustNewLexer(
+var Arduino = internal.Register(MustNewLazyLexer(
&Config{
Name: "Arduino",
Aliases: []string{"arduino"},
@@ -14,7 +14,11 @@ var Arduino = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-arduino"},
EnsureNL: true,
},
-Rules{
+arduinoRules,
+))
+
+func arduinoRules() Rules {
+return Rules{
"statements": {
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil},
{`char(16_t|32_t)\b`, KeywordType, nil},
@@ -106,5 +110,5 @@ var Arduino = internal.Register(MustNewLexer(
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/a/armasm.go (generated, vendored, new file): 72 additions
@@ -0,0 +1,72 @@
package a

import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)

var ArmAsm = internal.Register(MustNewLazyLexer(
&Config{
Name: "ArmAsm",
Aliases: []string{"armasm"},
EnsureNL: true,
Filenames: []string{"*.s", "*.S"},
MimeTypes: []string{"text/x-armasm", "text/x-asm"},
},
armasmRules,
))

func armasmRules() Rules {
return Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`[@;].*?\n`, CommentSingle, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
},
"literal": {
// Binary
{`0b[01]+`, NumberBin, Pop(1)},
// Hex
{`0x\w{1,8}`, NumberHex, Pop(1)},
// Octal
{`0\d+`, NumberOct, Pop(1)},
// Float
{`\d+?\.\d+?`, NumberFloat, Pop(1)},
// Integer
{`\d+`, NumberInteger, Pop(1)},
// String
{`(")(.+)(")`, ByGroups(Punctuation, StringDouble, Punctuation), Pop(1)},
// Char
{`(')(.{1}|\\.{1})(')`, ByGroups(Punctuation, StringChar, Punctuation), Pop(1)},
},
"opcode": {
// Escape at line end
{`\n`, Text, Pop(1)},
// Comment
{`(@|;).*\n`, CommentSingle, Pop(1)},
// Whitespace
{`(\s+|,)`, Text, nil},
// Register by number
{`[rapcfxwbhsdqv]\d{1,2}`, NameClass, nil},
// Address by hex
{`=0x\w+`, ByGroups(Text, NameLabel), nil},
// Pseudo address by label
{`(=)(\w+)`, ByGroups(Text, NameLabel), nil},
// Immediate
{`#`, Text, Push("literal")},
},
"root": {
Include("commentsandwhitespace"),
// Directive with optional param
{`(\.\w+)([ \t]+\w+\s+?)?`, ByGroups(KeywordNamespace, NameLabel), nil},
// Label with data
{`(\w+)(:)(\s+\.\w+\s+)`, ByGroups(NameLabel, Punctuation, KeywordNamespace), Push("literal")},
// Label
{`(\w+)(:)`, ByGroups(NameLabel, Punctuation), nil},
// Syscall Op
{`svc\s+\w+`, NameNamespace, nil},
// Opcode
{`[a-zA-Z]+`, Text, Push("opcode")},
},
}
}

vendor/github.com/alecthomas/chroma/lexers/a/awk.go (generated, vendored): 22 changes
@@ -6,14 +6,18 @@ import (
)

// Awk lexer.
-var Awk = internal.Register(MustNewLexer(
+var Awk = internal.Register(MustNewLazyLexer(
&Config{
Name: "Awk",
Aliases: []string{"awk", "gawk", "mawk", "nawk"},
Filenames: []string{"*.awk"},
MimeTypes: []string{"application/x-awk"},
},
-Rules{
+awkRules,
+))
+
+func awkRules() Rules {
+return Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`#.*$`, CommentSingle, nil},
@@ -30,19 +34,19 @@ var Awk = internal.Register(MustNewLexer(
"root": {
{`^(?=\s|/)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"),
-{`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
+{`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil},
-{`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")},
+{`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")},
{`function\b`, KeywordDeclaration, Push("slashstartsregex")},
-{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil},
-{`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil},
-{`[$a-zA-Z_]\w*`, NameOther, nil},
+{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil},
+{`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil},
+{`[@$a-zA-Z_]\w*`, NameOther, nil},
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`[0-9]+`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Ballerina lexer.
-var Ballerina = internal.Register(MustNewLexer(
+var Ballerina = internal.Register(MustNewLazyLexer(
&Config{
Name: "Ballerina",
Aliases: []string{"ballerina"},
@@ -14,7 +14,11 @@ var Ballerina = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-ballerina"},
DotAll: true,
},
-Rules{
+ballerinaRules,
+))
+
+func ballerinaRules() Rules {
+return Rules{
"root": {
{`[^\S\n]+`, Text, nil},
{`//.*?\n`, CommentSingle, nil},
@@ -42,5 +46,5 @@ var Ballerina = internal.Register(MustNewLexer(
"import": {
{`[\w.]+`, NameNamespace, Pop(1)},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/b/bash.go (generated, vendored): 25 changes
@@ -7,17 +7,27 @@ import (
"github.com/alecthomas/chroma/lexers/internal"
)

// TODO(moorereason): can this be factored away?
var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)

// Bash lexer.
-var Bash = internal.Register(MustNewLexer(
+var Bash = internal.Register(MustNewLazyLexer(
&Config{
Name: "Bash",
Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"},
-Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
+Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
MimeTypes: []string{"application/x-sh", "application/x-shellscript"},
},
-Rules{
+bashRules,
+).SetAnalyser(func(text string) float32 {
+if bashAnalyserRe.FindString(text) != "" {
+return 1.0
+}
+return 0.0
+}))
+
+func bashRules() Rules {
+return Rules{
"root": {
Include("basic"),
{"`", LiteralStringBacktick, Push("backticks")},
@@ -36,7 +46,7 @@ var Bash = internal.Register(MustNewLexer(
{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
{`\A#!.+\n`, CommentPreproc, nil},
-{`#.*\S`, CommentSingle, nil},
+{`#.*(\S|$)`, CommentSingle, nil},
{`\\[\w\W]`, LiteralStringEscape, nil},
{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
{`[\[\]{}()=]`, Operator, nil},
@@ -86,10 +96,5 @@ var Bash = internal.Register(MustNewLexer(
{"`", LiteralStringBacktick, Pop(1)},
Include("root"),
},
-},
-).SetAnalyser(func(text string) float32 {
-if bashAnalyserRe.FindString(text) != "" {
-return 1.0
-}
-return 0.0
-}))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/b/bashsession.go (generated, vendored, new file): 27 additions
@@ -0,0 +1,27 @@
package b

import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)

// BashSession lexer.
var BashSession = internal.Register(MustNewLazyLexer(
&Config{
Name: "BashSession",
Aliases: []string{"bash-session", "console", "shell-session"},
Filenames: []string{".sh-session"},
MimeTypes: []string{"text/x-sh"},
EnsureNL: true,
},
bashsessionRules,
))

func bashsessionRules() Rules {
return Rules{
"root": {
{`^((?:\[[^]]+@[^]]+\]\s?)?[#$%>])(\s*)(.*\n?)`, ByGroups(GenericPrompt, Text, Using(Bash)), nil},
{`^.+\n?`, GenericOutput, nil},
},
}
}

vendor/github.com/alecthomas/chroma/lexers/b/batch.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Batchfile lexer.
-var Batchfile = internal.Register(MustNewLexer(
+var Batchfile = internal.Register(MustNewLazyLexer(
&Config{
Name: "Batchfile",
Aliases: []string{"bat", "batch", "dosbatch", "winbatch"},
@@ -14,7 +14,11 @@ var Batchfile = internal.Register(MustNewLexer(
MimeTypes: []string{"application/x-dos-batch"},
CaseInsensitive: true,
},
-Rules{
+batchfileRules,
+))
+
+func batchfileRules() Rules {
+return Rules{
"root": {
{`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil},
{`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")},
@@ -190,5 +194,5 @@ var Batchfile = internal.Register(MustNewLexer(
{`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)},
Default(Pop(1)),
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Bibtex lexer.
-var Bibtex = internal.Register(MustNewLexer(
+var Bibtex = internal.Register(MustNewLazyLexer(
&Config{
Name: "BibTeX",
Aliases: []string{"bib", "bibtex"},
@@ -15,7 +15,11 @@ var Bibtex = internal.Register(MustNewLexer(
NotMultiline: true,
CaseInsensitive: true,
},
-Rules{
+bibtexRules,
+))
+
+func bibtexRules() Rules {
+return Rules{
"root": {
Include("whitespace"),
{`@comment`, Comment, nil},
@@ -72,5 +76,5 @@ var Bibtex = internal.Register(MustNewLexer(
"whitespace": {
{`\s+`, Text, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/b/bicep.go (generated, vendored, new file): 112 additions
@@ -0,0 +1,112 @@
package b

import (
"strings"

. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)

// Bicep lexer.
var Bicep = internal.Register(MustNewLazyLexer(
&Config{
Name: "Bicep",
Aliases: []string{"bicep"},
Filenames: []string{"*.bicep"},
},
bicepRules,
))

func bicepRules() Rules {
bicepFunctions := []string{
"any", "array", "concat", "contains", "empty", "first", "intersection", "items",
"last", "length", "min", "max", "range", "skip", "take", "union",
"dateTimeAdd", "utcNow", "deployment", "environment", "loadFileAsBase64", "loadTextContent",
"int", "json", "extensionResourceId", "getSecret", "list", "listKeys", "listKeyValue",
"listAccountSas", "listSecrets", "pickZones", "reference", "resourceId",
"subscriptionResourceId", "tenantResourceId", "managementGroup", "resourceGroup",
"subscription", "tenant", "base64", "base64ToJson", "base64ToString", "dataUri",
"dataUriToString", "endsWith", "format", "guid", "indexOf", "lastIndexOf", "length",
"newGuid", "padLeft", "replace", "split", "startsWith", "string", "substring",
"toLower", "toUpper", "trim", "uniqueString", "uri", "uriComponent", "uriComponentToString",
}

return Rules{
"root": {
{`//[^\n\r]+`, CommentSingle, nil},
{`/\*.*?\*/`, CommentMultiline, nil},
{`([']?\w+[']?)(:)`, ByGroups(NameProperty, Punctuation), nil},
{`\b('(resourceGroup|subscription|managementGroup|tenant)')\b`, KeywordNamespace, nil},
{`'[\w\$\{\(\)\}\.]{1,}?'`, LiteralStringInterpol, nil},
{`('''|').*?('''|')`, LiteralString, nil},
{`\b(allowed|batchSize|description|maxLength|maxValue|metadata|minLength|minValue|secure)\b`, NameDecorator, nil},
{`\b(az|sys)\.`, NameNamespace, nil},
{`\b(` + strings.Join(bicepFunctions, "|") + `)\b`, NameFunction, nil},
// https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/bicep-functions-logical
{`\b(bool)(\()`, ByGroups(NameFunction, Punctuation), nil},
{`\b(for|if|in)\b`, Keyword, nil},
{`\b(module|output|param|resource|var)\b`, KeywordDeclaration, nil},
{`\b(array|bool|int|object|string)\b`, KeywordType, nil},
// https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/operators
{`(>=|>|<=|<|==|!=|=~|!~|::|&&|\?\?|!|-|%|\*|\/|\+)`, Operator, nil},
{`[\(\)\[\]\.:\?{}@=]`, Punctuation, nil},
{`[\w_-]+`, Text, nil},
{`\s+`, TextWhitespace, nil},
},
}
}

vendor/github.com/alecthomas/chroma/lexers/b/blitz.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Blitzbasic lexer.
-var Blitzbasic = internal.Register(MustNewLexer(
+var Blitzbasic = internal.Register(MustNewLazyLexer(
&Config{
Name: "BlitzBasic",
Aliases: []string{"blitzbasic", "b3d", "bplus"},
@@ -14,7 +14,11 @@ var Blitzbasic = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-bb"},
CaseInsensitive: true,
},
-Rules{
+blitzbasicRules,
+))
+
+func blitzbasicRules() Rules {
+return Rules{
"root": {
{`[ \t]+`, Text, nil},
{`;.*?\n`, CommentSingle, nil},
@@ -44,5 +48,5 @@ var Blitzbasic = internal.Register(MustNewLexer(
{`"C?`, LiteralStringDouble, Pop(1)},
{`[^"]+`, LiteralStringDouble, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/b/bnf.go (generated, vendored): 12 changes
@@ -6,19 +6,23 @@ import (
)

// Bnf lexer.
-var Bnf = internal.Register(MustNewLexer(
+var Bnf = internal.Register(MustNewLazyLexer(
&Config{
Name: "BNF",
Aliases: []string{"bnf"},
Filenames: []string{"*.bnf"},
MimeTypes: []string{"text/x-bnf"},
},
-Rules{
+bnfRules,
+))
+
+func bnfRules() Rules {
+return Rules{
"root": {
{`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil},
{`::=`, Operator, nil},
{`[^<>:]+`, Text, nil},
{`.`, Text, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go (generated, vendored): 12 changes
@@ -6,14 +6,18 @@ import (
)

// Brainfuck lexer.
-var Brainfuck = internal.Register(MustNewLexer(
+var Brainfuck = internal.Register(MustNewLazyLexer(
&Config{
Name: "Brainfuck",
Aliases: []string{"brainfuck", "bf"},
Filenames: []string{"*.bf", "*.b"},
MimeTypes: []string{"application/x-brainfuck"},
},
-Rules{
+brainfuckRules,
+))
+
+func brainfuckRules() Rules {
+return Rules{
"common": {
{`[.,]+`, NameTag, nil},
{`[+-]+`, NameBuiltin, nil},
@@ -30,5 +34,5 @@ var Brainfuck = internal.Register(MustNewLexer(
{`\]`, Keyword, Pop(1)},
Include("common"),
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/c/c.go (generated, vendored): 19 changes
@@ -6,14 +6,19 @@ import (
)

// C lexer.
-var C = internal.Register(MustNewLexer(
+var C = internal.Register(MustNewLazyLexer(
&Config{
Name: "C",
Aliases: []string{"c"},
-Filenames: []string{"*.c", "*.h", "*.idc"},
-MimeTypes: []string{"text/x-chdr", "text/x-csrc"},
+Filenames: []string{"*.c", "*.h", "*.idc", "*.x[bp]m"},
+MimeTypes: []string{"text/x-chdr", "text/x-csrc", "image/x-xbitmap", "image/x-xpixmap"},
+EnsureNL: true,
},
-Rules{
+cRules,
+))
+
+func cRules() Rules {
+return Rules{
"whitespace": {
{`^#if\s+0`, CommentPreproc, Push("if0")},
{`^#`, CommentPreproc, Push("macro")},
@@ -38,7 +43,7 @@ var C = internal.Register(MustNewLexer(
{`[~!%^&*+=|?:<>/-]`, Operator, nil},
{`[()\[\],.]`, Punctuation, nil},
{Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil},
-{`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil},
+{`(bool|int|long|float|short|double|char((8|16|32)_t)?|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil},
{Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil},
{`(__m(128i|128d|128|64))\b`, KeywordReserved, nil},
{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil},
@@ -87,5 +92,5 @@ var C = internal.Register(MustNewLexer(
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil},
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/c/caddyfile.go (generated, vendored, new file): 216 additions
@@ -0,0 +1,216 @@
package c

import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)

// caddyfileCommon are the rules common to both of the lexer variants
func caddyfileCommonRules() Rules {
return Rules{
"site_block_common": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
Include("base"),
},
"not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("arguments")},
{`\s+`, Text, nil},
},
"deep_not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
{`\s+`, Text, nil},
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
}
}

// Caddyfile lexer.
var Caddyfile = internal.Register(MustNewLazyLexer(
&Config{
Name: "Caddyfile",
Aliases: []string{"caddyfile", "caddy"},
Filenames: []string{"Caddyfile*"},
MimeTypes: []string{},
},
caddyfileRules,
))

func caddyfileRules() Rules {
return Rules{
"root": {
Include("comments"),
// Global options block
{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
// Snippets
{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
// Site label
{`[^#{(\s,]+`, GenericHeading, Push("label")},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
{`\s+`, Text, nil},
},
"globals": {
{`\}`, Punctuation, Pop(1)},
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"snippet": {
{`\}`, Punctuation, Pop(1)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Any directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"label": {
// Allow multiple labels, comma separated, newlines after
// a comma means another label is coming
{`,\s*\n?`, Text, nil},
{` `, Text, nil},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
// Site label
{`[^#{(\s,]+`, GenericHeading, nil},
// Comment after non-block label (hack because comments end in \n)
{`#.*\n`, CommentSingle, Push("site_block")},
// Note: if \n, we'll never pop out of the site_block, it's valid
{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
},
"site_block": {
{`\}`, Punctuation, Pop(2)},
Include("site_block_common"),
},
}.Merge(caddyfileCommonRules())
}

// Caddyfile directive-only lexer.
var CaddyfileDirectives = internal.Register(MustNewLazyLexer(
&Config{
Name: "Caddyfile Directives",
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
Filenames: []string{},
MimeTypes: []string{},
},
caddyfileDirectivesRules,
))

func caddyfileDirectivesRules() Rules {
return Rules{
// Same as "site_block" in Caddyfile
"root": {
Include("site_block_common"),
},
}.Merge(caddyfileCommonRules())
}

vendor/github.com/alecthomas/chroma/lexers/c/capnproto.go (generated, vendored): 12 changes
@@ -6,14 +6,18 @@ import (
)

// Cap'N'Proto Proto lexer.
-var CapNProto = internal.Register(MustNewLexer(
+var CapNProto = internal.Register(MustNewLazyLexer(
&Config{
Name: "Cap'n Proto",
Aliases: []string{"capnp"},
Filenames: []string{"*.capnp"},
MimeTypes: []string{},
},
-Rules{
+capNProtoRules,
+))
+
+func capNProtoRules() Rules {
+return Rules{
"root": {
{`#.*?$`, CommentSingle, nil},
{`@[0-9a-zA-Z]*`, NameDecorator, nil},
@@ -57,5 +61,5 @@ var CapNProto = internal.Register(MustNewLexer(
{`[])]`, NameAttribute, Pop(1)},
Default(Pop(1)),
},
-},
-))
+}
+}

vendor/github.com/alecthomas/chroma/lexers/c/ceylon.go (generated, vendored): 12 changes
@@ -6,7 +6,7 @@ import (
)

// Ceylon lexer.
-var Ceylon = internal.Register(MustNewLexer(
+var Ceylon = internal.Register(MustNewLazyLexer(
&Config{
Name: "Ceylon",
Aliases: []string{"ceylon"},
@@ -14,7 +14,11 @@ var Ceylon = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-ceylon"},
DotAll: true,
},
-Rules{
+ceylonRules,
+))
+
+func ceylonRules() Rules {
+return Rules{
"root": {
{`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
{`[^\S\n]+`, Text, nil},
@@ -59,5 +63,5 @@ var Ceylon = internal.Register(MustNewLexer(
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
-},
-))
+}
+}

Some files were not shown because too many files have changed in this diff.