Compare commits


58 Commits
4.0.4 ... 4.2.4

Author SHA1 Message Date
fe8f39013e Merge pull request #673 from chrisallenlane/win-compat
fix: Windows compatibility
2022-07-04 17:03:11 -04:00
1016b20ef2 chore: bump version to 4.2.4
Bump version to `4.2.4`. This version contains numerous Windows fixes
and improvements.
2022-07-04 16:58:58 -04:00
def8985dcd fix: Windows support
Fix an issue whereby the installer installed cheatsheets into the wrong
directory on Windows. This occurred because previously `path.Join` was
used where `path/filepath.Join` should have been used.

This matters, because the former always uses `/` as the path separator,
whereas the latter will use `/` or `\` as is appropriate for the
runtime environment.

This should resolve bullet point 4 in #665.
2022-07-04 16:55:57 -04:00
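For context, the difference between the two calls is easy to demonstrate. The snippet below is an illustrative sketch, not code from this changeset; it only exercises the two standard-library packages named above.

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
)

func main() {
	// path.Join always joins with "/", on every platform.
	fmt.Println(path.Join("cheatsheets", "community")) // cheatsheets/community

	// filepath.Join uses the separator of the runtime OS:
	// "cheatsheets\community" on Windows, "cheatsheets/community" elsewhere.
	fmt.Println(filepath.Join("cheatsheets", "community"))
}
```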
e6f12147df fix: config fixes for Windows
- Update the default config file to use `more` instead of `less` as the
  default pager, in order to support Windows out-of-the-box. (#655, #665).

- Use `terminal` Chroma formatter (rather than `terminal16m`) in order
  to accommodate less capable terminal emulators like `cmd.exe` by
  default. Similarly, default to `colorize: false` in configs (changed
  from `true`) (#665).

- Comment out default `style` in order to avoid printing ANSI color
  codes into terminals without color support (#665)

- Attempt to intelligently choose a default editor, rather than rely on
  a hard-coded `vim` in the configs. This should make it easier to use
  `cheat` immediately without needing to specify configs. It should also
  improve `cheat`'s Windows compatibility. (#665)
2022-07-04 16:06:37 -04:00
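A simplified sketch of the editor-selection order described in the last bullet is shown below. The real logic lives in cheat's config package (see the `internal/config` diff further down); `chooseEditor` is an illustrative name, not a function in the codebase.

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"runtime"
)

// chooseEditor mirrors the fallback order: $VISUAL, then $EDITOR, then a
// platform default (notepad on Windows, nano elsewhere if it is on the PATH).
func chooseEditor() (string, error) {
	if v := os.Getenv("VISUAL"); v != "" {
		return v, nil // explicit user preference wins
	}
	if e := os.Getenv("EDITOR"); e != "" {
		return e, nil
	}
	if runtime.GOOS == "windows" {
		return "notepad", nil // ships with Windows, so it is a safe default
	}
	if _, err := exec.LookPath("nano"); err == nil {
		return "nano", nil // common on Linux/macOS
	}
	return "", fmt.Errorf("no editor set")
}

func main() {
	editor, err := chooseEditor()
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("editor:", editor)
}
```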
a8c2c396ed feat(build): create docker-run target
Create a `docker-run` `make` target for opening a shell in an Alpine
container for development.
2022-07-04 13:13:27 -04:00
35262df4f2 fix(build): Windows executable packaging
Fix an issue whereby the Windows zip release contained an extraneous
(and annoying) `dist` parent directory.
2022-07-04 12:34:06 -04:00
12ffa4cb5c Merge pull request #644 from cheat/develop
Windows fixes, Android support
2021-10-09 12:13:01 -04:00
d9c602f9e1 Merge pull request #643 from chrisallenlane/android
fix(Paths): Android support
2021-10-09 11:30:18 -04:00
b67ff8b6a8 fix(Paths): Android support
Add `"android"` to the explicit whitelist of supported operating
systems.  This may resolve incompatibilities with certain Android
environments.
2021-10-09 11:27:38 -04:00
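A minimal sketch of the kind of whitelist check involved, assuming a switch over `runtime.GOOS` (the actual list lives in cheat's config-path logic, shown in the `paths.go` diff below; this snippet is illustrative only):

```go
package main

import (
	"fmt"
	"runtime"
)

// supportedOS reports whether the given GOOS value is treated as a
// Unix-like platform for config-path purposes.
func supportedOS(goos string) bool {
	switch goos {
	case "android", "darwin", "linux", "freebsd":
		return true
	default:
		return false
	}
}

func main() {
	fmt.Printf("%s supported: %v\n", runtime.GOOS, supportedOS(runtime.GOOS))
}
```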
a500a621a1 chore: bump version
Bump version to 4.2.3.
2021-10-09 10:59:02 -04:00
23b6928874 Merge pull request #639 from mattn/fix-windows
Fix Windows
2021-10-09 10:10:39 -04:00
9de39fb12b Merge pull request #634 from cheat/dependabot/go_modules/github.com/mattn/go-isatty-0.0.14
chore(deps): bump github.com/mattn/go-isatty from 0.0.13 to 0.0.14
2021-10-09 09:51:49 -04:00
ad501c4cbe Merge pull request #641 from OmgImAlexis/patch-1
chore: fix typo in comment
2021-10-09 09:39:41 -04:00
f17de401e5 docs(CONTRIBUTING): pr against develop
Add a note to `CONTRIBUTING.md` requesting that contributors open
pull-requests against the `develop` branch.
2021-10-09 09:34:23 -04:00
2c097adeda chore: fix typo in comment 2021-09-30 07:30:20 +09:30
b825e0f535 Fix Windows 2021-09-29 01:33:59 +09:00
8385277b28 chore(deps): bump github.com/mattn/go-isatty from 0.0.13 to 0.0.14
Bumps [github.com/mattn/go-isatty](https://github.com/mattn/go-isatty) from 0.0.13 to 0.0.14.
- [Release notes](https://github.com/mattn/go-isatty/releases)
- [Commits](https://github.com/mattn/go-isatty/compare/v0.0.13...v0.0.14)

---
updated-dependencies:
- dependency-name: github.com/mattn/go-isatty
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2021-09-07 22:10:46 +00:00
768d55e5d4 chore: bump version
Bump version to `4.2.2`.
2021-06-08 21:02:03 -04:00
6aedc5c116 chore: whitespace edit on Makefile 2021-06-08 20:59:57 -04:00
e881bb1f97 chore: update go.sum 2021-06-08 20:59:57 -04:00
501f9c66ad deps: upgrade dependencies 2021-06-08 20:59:57 -04:00
a2aa82d9f3 Add ARM64/ARMv8 build 2021-06-08 20:59:57 -04:00
018bce7ad5 Fix ZSH autocompletion 2021-06-07 11:42:47 +02:00
17acefdd9b Merge pull request #617 from bernermic/master
Adds some git helper scripts
2021-05-14 12:21:26 -04:00
37918e09a4 Adds some git helper scripts 2021-05-07 20:53:54 +02:00
86967873a8 Merge pull request #623 from cheat/github-actions
chore: migrate into Github Actions
2021-05-03 17:02:27 -04:00
d237d98c15 chore: migrate into Github Actions
Replace the Travis CI integration with GitHub Actions.
2021-05-03 16:43:27 -04:00
eb9b3e7798 Merge pull request #624 from cheat/dependabot/add-v2-config-file
chore: upgrade to GitHub-native Dependabot
2021-05-03 14:32:08 -04:00
b0a351033d Upgrade to GitHub-native Dependabot 2021-04-29 20:40:56 +00:00
1eb44e8809 Merge pull request #621 from chrisallenlane/v4.2.1
Squashed commit of the following:
2021-04-28 12:55:17 -04:00
55b18b4897 Squashed commit of the following:
commit 95479c8ad744db48386a5c78e54ef8da80e9120b
Author: Chris Lane <chris@chris-allen-lane.com>
Date:   Wed Apr 28 12:26:32 2021 -0400

    chore(version): bump version to 4.2.1

commit 6956f51cae
Author: Chris Lane <chris@chris-allen-lane.com>
Date:   Wed Apr 28 12:24:21 2021 -0400

    fix(Makefile): `vendor-update`

    Update the `vendor-update` build target to run `go mod vendor` after
    updating dependencies.

commit 0aca411279
Author: Chris Lane <chris@chris-allen-lane.com>
Date:   Wed Apr 28 12:23:24 2021 -0400

    chore(deps): update dependencies

commit e847956b02
Author: Chris Lane <chris@chris-allen-lane.com>
Date:   Wed Apr 28 08:26:51 2021 -0400

    chore(deps): build updates

    - Upgrade `go` to `1.16.3`

    - Attempt to fix build errors regarding dependencies
2021-04-28 12:35:32 -04:00
883a17092f Merge pull request #606 from chrisallenlane/4.2.0
4.2.0
2020-11-28 11:27:09 -05:00
4f2a57fce8 fix(view): whitespace corrections
- Fix bug whereby `--all` flag would conflict with pager

- Fix whitespace inconsistencies among view and search outputs
2020-11-28 11:18:16 -05:00
ecc96c64f9 refactor(installer): externalize installer
Move installation-related code out of `main.go` and into a new
`installer.Run` method.
2020-11-28 10:32:37 -05:00
a81dd96ff4 fix: rename display.go
Rename `display.go` to `write.go`. (I forgot to do this previously.)
2020-11-27 23:05:02 -05:00
fb538baba5 chore(version): bump to 4.2.0 2020-11-27 22:57:25 -05:00
1a7b5c6127 feat(display): make Faint respect Colorize
Make `display.Faint` respect the `Colorize` config value.
2020-11-27 22:50:55 -05:00
cdddfbb516 chore: rename display.Display
Rename `display.Display` to `display.Write` for clarity and to reduce
"stutter".
2020-11-27 22:35:24 -05:00
4ef4c35d8c feat(search): search all cheatpaths
Update the search function. It now searches all cheatpaths all the time,
as if `--all` were implicitly passed.
2020-11-27 22:31:16 -05:00
a58294859e chore: spelling
`s/pathSheets/pathsheets/g` in `cmd_list` for consistency with naming used elsewhere.
2020-11-27 22:26:14 -05:00
606092e288 feat(search): improve search output formatting
Improve the search output formatting.
2020-11-27 17:06:02 -05:00
233a9de1aa feat: implement --all flag
Implement an `--all` flag that can be used to view cheatsheets on all
cheatpaths. (Resolves #548)
2020-11-27 16:39:34 -05:00
aa16f68620 feat(display): add methods to display
- Add `indent`, `faint`, and `underline` methods to `display`
- Add tests for the above
2020-11-27 16:14:33 -05:00
367673d5d9 chore(dependencies): update dependencies
Run `make vendor-update`.
2020-11-27 09:51:39 -05:00
08fb9e11a9 feat(Makefile): add vendor-update
Add `vendor-update` target to `Makefile`, which updates all dependencies
to their newest versions.
2020-11-27 09:50:11 -05:00
3f4d4bddb2 feat(tests): add unit-tests
Add unit-tests for `sheets.Load`.
2020-11-11 19:33:31 -05:00
6c6753b35c Merge pull request #599 from chrisallenlane/issue-597
fix: update installation instructions in README
2020-11-07 18:56:37 -05:00
0718b606e1 fix(README): clarify installation verbiage
Update the installation verbiage in the `README` for clarity
(issue #597).
2020-11-07 18:48:24 -05:00
857119b443 feat(Docker): create development Docker image
- Create Docker image to be used for experimentation during development
- Create targets in `Makefile` pertaining to the above
2020-11-07 18:47:24 -05:00
f421483eea Merge pull request #596 from chrisallenlane/v4.1.1
v4.1.1
2020-11-03 18:32:25 -05:00
4adddbf504 chore: bump version to v4.1.1 2020-11-03 18:05:46 -05:00
b9c86b6975 chore(dependencies): update dependencies 2020-11-03 17:59:56 -05:00
0b21ccf6f8 feat(tests): improve test coverage 2020-11-03 17:29:49 -05:00
a3ad8c5101 Merge pull request #595 from chrisallenlane/codeql
feat: integrate CodeQL build action
2020-11-01 10:51:38 -05:00
bacb74929a feat: integrate CodeQL build action 2020-11-01 10:47:25 -05:00
82e1c27494 Merge pull request #588 from chrisallenlane/bare-tag
feat: implement `cheat -t` shorthand
2020-09-05 09:05:09 -04:00
45beeb2edb chore: bump version to 4.1.0 2020-09-05 08:56:51 -04:00
c2c479b36c feat: support -t shorthand
Make `cheat -t <tag>` function as a shorthand for `cheat -l -t <tag>`.
2020-09-02 17:17:44 -04:00
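A self-contained sketch (not the actual `cmd/cheat/main.go`) of the dispatch rule this change introduces: a bare, non-empty `--tag` now routes to the list command, so `cheat -t <tag>` behaves like `cheat -l -t <tag>`.

```go
package main

import "fmt"

func dispatch(opts map[string]interface{}) string {
	switch {
	case opts["--list"] == true:
		return "list"
	case opts["<cheatsheet>"] != nil:
		return "view"
	// the shorthand: --tag alone implies a filtered list
	case opts["--tag"] != nil && opts["--tag"].(string) != "":
		return "list"
	default:
		return "usage"
	}
}

func main() {
	fmt.Println(dispatch(map[string]interface{}{"--tag": "personal"})) // list
}
```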
581 changed files with 25872 additions and 20715 deletions

.github/dependabot.yml (new file)

@@ -0,0 +1,11 @@
version: 2
updates:
- package-ecosystem: gomod
  directory: "/"
  schedule:
    interval: daily
  open-pull-requests-limit: 10
  ignore:
  - dependency-name: github.com/alecthomas/chroma
    versions:
    - 0.9.1

.github/workflows/build.yml (new file)

@@ -0,0 +1,57 @@
name: Go

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:

  # TODO: is it possible to DRY out these jobs? Aside from `runs-on`, they are
  # identical.

  build-linux:
    runs-on: [ ubuntu-latest ]
    steps:
      - uses: actions/checkout@v2

      - name: Set up Go
        uses: actions/setup-go@v2
        with:
          go-version: 1.16

      - name: Set up Revive (linter)
        run: go get -u github.com/boyter/scc github.com/mgechev/revive
        env:
          GO111MODULE: off

      - name: Build
        run: make build

      - name: Test
        run: make test

  build-osx:
    runs-on: [ macos-latest ]
    steps:
      - uses: actions/checkout@v2

      - name: Set up Go
        uses: actions/setup-go@v2
        with:
          go-version: 1.16

      - name: Set up Revive (linter)
        run: go get -u github.com/boyter/scc github.com/mgechev/revive
        env:
          GO111MODULE: off

      - name: Build
        run: make build

      - name: Test
        run: make test

  # TODO: windows

.github/workflows/codeql-analysis.yml (new file)

@@ -0,0 +1,36 @@
name: CodeQL

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]
  schedule:
    - cron: '45 23 * * 0'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}

      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1

.travis.yml (deleted)

@@ -1,15 +0,0 @@
-language: go
-
-go:
-- 1.14.x
-
-os:
-- linux
-- osx
-
-env:
-- GO111MODULE=on
-
-install: true
-
-script: make ci

CONTRIBUTING.md

@@ -35,6 +35,9 @@ Are you unable to do the above, but still want to contribute? You can help
 `cheat` simply by telling others about it. Share it with friends and coworkers
 that might benefit from using it.
 
+#### Pull Requests ####
+Please open all pull-requests against the `develop` branch.
+
 [cheat]: https://github.com/cheat/cheat
 [cheatsheets]: https://github.com/cheat/cheatsheets

Dockerfile (new file)

@@ -0,0 +1,8 @@
# NB: this image isn't used anywhere in the build pipeline. It exists to
# conveniently facilitate ad-hoc experimentation in a sandboxed environment
# during development.
FROM golang:1.15-alpine
RUN apk add git less make
WORKDIR /app

Makefile

@@ -7,6 +7,7 @@ dist_dir := ./dist
 CAT := cat
 COLUMN := column
 CTAGS := ctags
+DOCKER := docker
 GO := go
 GREP := grep
 GZIP := gzip --best
@@ -20,6 +21,8 @@ SED := sed
 SORT := sort
 ZIP := zip -m
 
+docker_image := cheat-devel:latest
+
 # build flags
 BUILD_FLAGS := -ldflags="-s -w" -mod vendor -trimpath
 GOBIN :=
@@ -33,21 +36,18 @@ releases := \
 $(dist_dir)/cheat-linux-arm5 \
 $(dist_dir)/cheat-linux-arm6 \
 $(dist_dir)/cheat-linux-arm7 \
+$(dist_dir)/cheat-linux-arm64 \
 $(dist_dir)/cheat-windows-amd64.exe
 
 ## build: build an executable for your architecture
 .PHONY: build
-build: $(dist_dir) clean vendor generate man
+build: $(dist_dir) clean fmt lint vet vendor generate man
 $(GO) build $(BUILD_FLAGS) -o $(dist_dir)/cheat $(cmd_dir)
 
 ## build-release: build release executables
 .PHONY: build-release
 build-release: $(releases)
 
-## ci: build a "release" executable for the current architecture (used in ci)
-.PHONY: ci
-ci: | setup prepare build
-
 # cheat-darwin-amd64
 $(dist_dir)/cheat-darwin-amd64: prepare
 GOARCH=amd64 GOOS=darwin \
@@ -77,11 +77,16 @@ $(dist_dir)/cheat-linux-arm6: prepare
 $(dist_dir)/cheat-linux-arm7: prepare
 GOARCH=arm GOOS=linux GOARM=7 \
 $(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
 
+# cheat-linux-arm64
+$(dist_dir)/cheat-linux-arm64: prepare
+GOARCH=arm64 GOOS=linux \
+$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(GZIP) $@ && chmod -x $@.gz
+
 # cheat-windows-amd64
 $(dist_dir)/cheat-windows-amd64.exe: prepare
 GOARCH=amd64 GOOS=windows \
-$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(ZIP) $@.zip $@
+$(GO) build $(BUILD_FLAGS) -o $@ $(cmd_dir) && $(ZIP) $@.zip $@ -j
 
 # ./dist
 $(dist_dir):
@@ -105,6 +110,7 @@ clean: $(dist_dir)
 .PHONY: distclean
 distclean:
 $(RM) -f tags
+@$(DOCKER) image rm -f $(docker_image)
 
 ## setup: install revive (linter) and scc (sloc tool)
 .PHONY: setup
@@ -132,6 +138,10 @@ man:
 vendor:
 $(GO) mod vendor && $(GO) mod tidy && $(GO) mod verify
 
+## vendor-update: update vendored dependencies
+vendor-update:
+$(GO) get -t -u ./... && $(GO) mod vendor
+
 ## fmt: run go fmt
 .PHONY: fmt
 fmt:
@@ -165,6 +175,21 @@ check: | vendor fmt lint vet test
 .PHONY: prepare
 prepare: | $(dist_dir) clean generate vendor fmt lint vet test
 
+## docker-setup: create a docker image for use during development
+.PHONY: docker-setup
+docker-setup:
+$(DOCKER) build -t $(docker_image) -f Dockerfile .
+
+## docker-run: shell into the development docker container
+.PHONY: docker-run
+docker-run:
+$(DOCKER) run -v `pwd`:/app -ti $(docker_image) sh
+
+## docker-sh: shell into the docker development container
+.PHONY: docker-sh
+docker-sh:
+$(DOCKER) run -v $(shell pwd):/app -ti $(docker_image) /bin/ash
+
 ## help: display this help text
 .PHONY: help
 help:

README.md

@@ -1,8 +1,9 @@
+![Workflow status](https://github.com/cheat/cheat/actions/workflows/build.yml/badge.svg)
+
 cheat
 =====
-[![Build Status](https://travis-ci.com/cheat/cheat.svg?branch=master)](https://travis-ci.com/cheat/cheat)
 
 `cheat` allows you to create and view interactive cheatsheets on the
 command-line. It was designed to help remind \*nix system administrators of
 options for commands that they use frequently, but not frequently enough to
@@ -47,17 +48,17 @@ Installing
 `cheat` has no dependencies. To install it, download the executable from the
 [releases][] page and place it on your `PATH`.
 
+Alternatively, if you have [go][] installed, you may install `cheat` using `go
+get`:
+
+```sh
+go get -u github.com/cheat/cheat/cmd/cheat
+```
 
 Configuring
 -----------
 
 ### conf.yml ###
 `cheat` is configured by a YAML file that will be auto-generated on first run.
-Should you need to create a config file manually, you can do
-so via:
-
-```sh
-mkdir -p ~/.config/cheat && cheat --init > ~/.config/cheat/conf.yml
-```
 
 By default, the config file is assumed to exist on an XDG-compliant
 configuration path like `~/.config/cheat/conf.yml`. If you would like to store
@@ -94,6 +95,21 @@ The `cheat` executable includes no cheatsheets, but [community-sourced
 cheatsheets are available][cheatsheets]. You will be asked if you would like to
 install the community-sourced cheatsheets the first time you run `cheat`.
 
+### Script ###
+You can manage the cheatsheets via a script `cheatsheets`.
+
+#### Download and install ####
+```sh
+mkdir -p ~/.local/bin
+wget -O ~/.local/bin/cheatsheets https://raw.githubusercontent.com/cheat/cheat/master/scripts/git/cheatsheets
+chmod +x ~/.local/bin/cheatsheets
+```
+
+#### Pull changes ####
+To pull the community and personal cheatsheets call `cheatsheets pull`
+
+#### Push changes ####
+To push your personal cheatsheets call `cheatsheets push`
 
 Cheatpaths
 ----------
@@ -211,3 +227,4 @@ Additionally, `cheat` supports enhanced autocompletion via integration with
 [cheatsheets]: https://github.com/cheat/cheatsheets
 [completions]: https://github.com/cheat/cheat/tree/master/scripts
 [fzf]: https://github.com/junegunn/fzf
+[go]: https://golang.org

@@ -1,3 +1,4 @@
+//go:build ignore
 // +build ignore
 
 // This script embeds `docopt.txt and `conf.yml` into the binary during at
@@ -5,13 +6,11 @@
 package main
 
 import (
 "fmt"
 "io/ioutil"
 "log"
 "os"
-"path"
 "path/filepath"
 )
@@ -52,10 +51,10 @@ func main() {
 for _, file := range files {
 // delete the outfile
-os.Remove(path.Join(root, file.Out))
+os.Remove(filepath.Join(root, file.Out))
 
 // read the static template
-bytes, err := ioutil.ReadFile(path.Join(root, file.In))
+bytes, err := ioutil.ReadFile(filepath.Join(root, file.In))
 if err != nil {
 log.Fatal(err)
 }
@@ -64,7 +63,7 @@ func main() {
 data := template(file.Method, string(bytes))
 
 // write the file to the specified outpath
-spath := path.Join(root, file.Out)
+spath := filepath.Join(root, file.Out)
 err = ioutil.WriteFile(spath, []byte(data), 0644)
 if err != nil {
 log.Fatal(err)

@@ -27,5 +27,5 @@ func cmdDirectories(opts map[string]interface{}, conf config.Config) {
 // write columnized output to stdout
 w.Flush()
-display.Display(out.String(), conf)
+display.Write(out.String(), conf)
 }

@@ -4,7 +4,7 @@ import (
 "fmt"
 "os"
 "os/exec"
-"path"
+"path/filepath"
 "strings"
 
 "github.com/cheat/cheat/internal/cheatpath"
@@ -58,10 +58,10 @@ func cmdEdit(opts map[string]interface{}, conf config.Config) {
 }
 
 // compute the new edit path
-editpath = path.Join(writepath.Path, sheet.Title)
+editpath = filepath.Join(writepath.Path, sheet.Title)
 
 // create any necessary subdirectories
-dirs := path.Dir(editpath)
+dirs := filepath.Dir(editpath)
 if dirs != "." {
 if err := os.MkdirAll(dirs, 0755); err != nil {
 fmt.Fprintf(os.Stderr, "failed to create directory: %s, %v\n", dirs, err)
@@ -87,10 +87,10 @@ func cmdEdit(opts map[string]interface{}, conf config.Config) {
 }
 
 // compute the new edit path
-editpath = path.Join(writepath.Path, cheatsheet)
+editpath = filepath.Join(writepath.Path, cheatsheet)
 
 // create any necessary subdirectories
-dirs := path.Dir(editpath)
+dirs := filepath.Dir(editpath)
 if dirs != "." {
 if err := os.MkdirAll(dirs, 0755); err != nil {
 fmt.Fprintf(os.Stderr, "failed to create directory: %s, %v\n", dirs, err)

@@ -3,7 +3,7 @@ package main
 import (
 "fmt"
 "os"
-"path"
+"path/filepath"
 "runtime"
 "strings"
@@ -42,11 +42,11 @@ func cmdInit() {
 // determine the appropriate paths for config data and (optional) community
 // cheatsheets based on the user's platform
 confpath := confpaths[0]
-confdir := path.Dir(confpath)
+confdir := filepath.Dir(confpath)
 
 // create paths for community and personal cheatsheets
-community := path.Join(confdir, "/cheatsheets/community")
-personal := path.Join(confdir, "/cheatsheets/personal")
+community := filepath.Join(confdir, "cheatsheets", "community")
+personal := filepath.Join(confdir, "cheatsheets", "personal")
 
 // template the above paths into the default configs
 configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)

@@ -25,7 +25,7 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
 os.Exit(1)
 }
 
-// filter cheatcheats by tag if --tag was provided
+// filter cheatsheets by tag if --tag was provided
 if opts["--tag"] != nil {
 cheatsheets = sheets.Filter(
 cheatsheets,
@@ -37,8 +37,8 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
 // sheets with local sheets), here we simply want to create a slice
 // containing all sheets.
 flattened := []sheet.Sheet{}
-for _, pathSheets := range cheatsheets {
-for _, s := range pathSheets {
+for _, pathsheets := range cheatsheets {
+for _, s := range pathsheets {
 flattened = append(flattened, s)
 }
 }
@@ -105,5 +105,5 @@ func cmdList(opts map[string]interface{}, conf config.Config) {
 // write columnized output to stdout
 w.Flush()
-display.Display(out.String(), conf)
+display.Write(out.String(), conf)
 }

@@ -8,7 +8,6 @@ import (
 "github.com/cheat/cheat/internal/config"
 "github.com/cheat/cheat/internal/display"
-"github.com/cheat/cheat/internal/sheet"
 "github.com/cheat/cheat/internal/sheets"
 )
@@ -32,71 +31,65 @@ func cmdSearch(opts map[string]interface{}, conf config.Config) {
 )
 }
 
-// consolidate the cheatsheets found on all paths into a single map of
-// `title` => `sheet` (ie, allow more local cheatsheets to override less
-// local cheatsheets)
-consolidated := sheets.Consolidate(cheatsheets)
-
-// if <cheatsheet> was provided, search that single sheet only
-if opts["<cheatsheet>"] != nil {
-cheatsheet := opts["<cheatsheet>"].(string)
-
-// assert that the cheatsheet exists
-s, ok := consolidated[cheatsheet]
-if !ok {
-fmt.Printf("No cheatsheet found for '%s'.\n", cheatsheet)
-os.Exit(2)
-}
-
-consolidated = map[string]sheet.Sheet{
-cheatsheet: s,
-}
-}
-
-// sort the cheatsheets alphabetically, and search for matches
+// iterate over each cheatpath
 out := ""
-for _, sheet := range sheets.Sort(consolidated) {
-// assume that we want to perform a case-insensitive search for <phrase>
-pattern := "(?i)" + phrase
-
-// unless --regex is provided, in which case we pass the regex unaltered
-if opts["--regex"] == true {
-pattern = phrase
-}
-
-// compile the regex
-reg, err := regexp.Compile(pattern)
-if err != nil {
-fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to compile regexp: %s, %v", pattern, err))
-os.Exit(1)
-}
-
-// `Search` will return text entries that match the search terms. We're
-// using it here to overwrite the prior cheatsheet Text, filtering it to
-// only what is relevant
-sheet.Text = sheet.Search(reg)
-
-// if the sheet did not match the search, ignore it and move on
-if sheet.Text == "" {
-continue
-}
-
-// if colorization was requested, apply it here
-if conf.Color(opts) {
-sheet.Colorize(conf)
-}
-
-// output the cheatsheet title
-out += fmt.Sprintf("%s:\n", sheet.Title)
-
-// indent each line of content with two spaces
-for _, line := range strings.Split(sheet.Text, "\n") {
-out += fmt.Sprintf(" %s\n", line)
-}
-}
+for _, pathcheats := range cheatsheets {
+
+// sort the cheatsheets alphabetically, and search for matches
+for _, sheet := range sheets.Sort(pathcheats) {
+
+// if <cheatsheet> was provided, constrain the search only to
+// matching cheatsheets
+if opts["<cheatsheet>"] != nil && sheet.Title != opts["<cheatsheet>"] {
+continue
+}
+
+// assume that we want to perform a case-insensitive search for <phrase>
+pattern := "(?i)" + phrase
+
+// unless --regex is provided, in which case we pass the regex unaltered
+if opts["--regex"] == true {
+pattern = phrase
+}
+
+// compile the regex
+reg, err := regexp.Compile(pattern)
+if err != nil {
+fmt.Fprintln(os.Stderr, fmt.Sprintf("failed to compile regexp: %s, %v", pattern, err))
+os.Exit(1)
+}
+
+// `Search` will return text entries that match the search terms. We're
+// using it here to overwrite the prior cheatsheet Text, filtering it to
+// only what is relevant
+sheet.Text = sheet.Search(reg)
+
+// if the sheet did not match the search, ignore it and move on
+if sheet.Text == "" {
+continue
+}
+
+// if colorization was requested, apply it here
+if conf.Color(opts) {
+sheet.Colorize(conf)
+}
+
+// display the cheatsheet title and path
+out += fmt.Sprintf("%s %s\n",
+display.Underline(sheet.Title),
+display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
+)
+
+// indent each line of content
+out += display.Indent(sheet.Text) + "\n"
+}
+}
+
+// trim superfluous newlines
+out = strings.TrimSpace(out)
 
 // display the output
-display.Display(out, conf)
+// NB: resist the temptation to call `display.Display` multiple times in
+// the loop above. That will not play nicely with the paginator.
+display.Write(out, conf)
 }

@@ -26,5 +26,5 @@ func cmdTags(opts map[string]interface{}, conf config.Config) {
 }
 
 // display the output
-display.Display(out, conf)
+display.Write(out, conf)
 }

@@ -30,9 +30,39 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
 )
 }
 
-// consolidate the cheatsheets found on all paths into a single map of
-// `title` => `sheet` (ie, allow more local cheatsheets to override less
-// local cheatsheets)
+// if --all was passed, display cheatsheets from all cheatpaths
+if opts["--all"].(bool) {
+// iterate over the cheatpaths
+out := ""
+for _, cheatpath := range cheatsheets {
+
+// if the cheatpath contains the specified cheatsheet, display it
+if sheet, ok := cheatpath[cheatsheet]; ok {
+
+// identify the matching cheatsheet
+out += fmt.Sprintf("%s %s\n",
+display.Underline(sheet.Title),
+display.Faint(fmt.Sprintf("(%s)", sheet.CheatPath), conf),
+)
+
+// apply colorization if requested
+if conf.Color(opts) {
+sheet.Colorize(conf)
+}
+
+// display the cheatsheet
+out += display.Indent(sheet.Text) + "\n"
+}
+}
+
+// display and exit
+display.Write(strings.TrimSuffix(out, "\n"), conf)
+os.Exit(0)
+}
+
+// otherwise, consolidate the cheatsheets found on all paths into a single
+// map of `title` => `sheet` (ie, allow more local cheatsheets to override
+// less local cheatsheets)
 consolidated := sheets.Consolidate(cheatsheets)
 
 // fail early if the requested cheatsheet does not exist
@@ -48,5 +78,5 @@ func cmdView(opts map[string]interface{}, conf config.Config) {
 }
 
 // display the cheatsheet
-display.Display(sheet.Text, conf)
+display.Write(sheet.Text, conf)
 }

@@ -3,11 +3,12 @@ Usage:
 Options:
 --init Write a default config file to stdout
+-a --all Search among all cheatpaths
 -c --colorize Colorize output
 -d --directories List cheatsheet directories
 -e --edit=<cheatsheet> Edit <cheatsheet>
 -l --list List cheatsheets
--p --path=<name> Return only sheets found on path <name>
+-p --path=<name> Return only sheets found on cheatpath <name>
 -r --regex Treat search <phrase> as a regex
 -s --search=<phrase> Search cheatsheets for <phrase>
 -t --tag=<tag> Return only sheets matching <tag>

@@ -5,7 +5,6 @@ package main
 import (
 "fmt"
 "os"
-"path"
 "runtime"
 "strings"
@@ -17,7 +16,7 @@ import (
 "github.com/cheat/cheat/internal/installer"
 )
 
-const version = "4.0.4"
+const version = "4.2.4"
 
 func main() {
@@ -46,6 +45,9 @@ func main() {
 envvars := map[string]string{}
 for _, e := range os.Environ() {
 pair := strings.SplitN(e, "=", 2)
+if runtime.GOOS == "windows" {
+pair[0] = strings.ToUpper(pair[0])
+}
 envvars[pair[0]] = pair[1]
 }
@@ -74,62 +76,16 @@ func main() {
 os.Exit(0)
 }
 
-// read the config template
-configs := configs()
-
-// determine the appropriate paths for config data and (optional) community
-// cheatsheets based on the user's platform
+// choose a confpath
 confpath = confpaths[0]
-confdir := path.Dir(confpath)
-
-// create paths for community and personal cheatsheets
-community := path.Join(confdir, "/cheatsheets/community")
-personal := path.Join(confdir, "/cheatsheets/personal")
-
-// template the above paths into the default configs
-configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
-configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
-
-// prompt the user to download the community cheatsheets
-yes, err = installer.Prompt(
-"Would you like to download the community cheatsheets? [Y/n]",
-true,
-)
-if err != nil {
-fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
-os.Exit(1)
-}
-
-// clone the community cheatsheets if so instructed
-if yes {
-// clone the community cheatsheets
-if err := installer.Clone(community); err != nil {
-fmt.Fprintf(os.Stderr, "failed to create config: %v\n", err)
-os.Exit(1)
-}
-
-// also create a directory for personal cheatsheets
-if err := os.MkdirAll(personal, os.ModePerm); err != nil {
-fmt.Fprintf(
-os.Stderr,
-"failed to create config: failed to create directory: %s: %v\n",
-personal,
-err)
-os.Exit(1)
-}
-}
-
-// the config file does not exist, so we'll try to create one
-if err = config.Init(confpath, configs); err != nil {
-fmt.Fprintf(
-os.Stderr,
-"failed to create config file: %s: %v\n",
-confpath,
-err,
-)
-os.Exit(1)
-}
-
-// notify the user and exit
+
+// run the installer
+if err := installer.Run(configs(), confpath); err != nil {
+fmt.Fprintf(os.Stderr, "failed to run installer: %v\n", err)
+os.Exit(1)
+}
+
 fmt.Printf("Created config file: %s\n", confpath)
 fmt.Println("Please read this file for advanced configuration information.")
 os.Exit(0)
@@ -185,6 +141,9 @@ func main() {
 case opts["<cheatsheet>"] != nil:
 cmd = cmdView
+case opts["--tag"] != nil && opts["--tag"].(string) != "":
+cmd = cmdList
 default:
 fmt.Println(usage())
 os.Exit(0)

@@ -9,22 +9,23 @@
 func configs() string {
 return strings.TrimSpace(`---
 # The editor to use with 'cheat -e <sheet>'. Defaults to $EDITOR or $VISUAL.
-editor: vim
+# editor: vim
 
 # Should 'cheat' always colorize output?
-colorize: true
+colorize: false
 
 # Which 'chroma' colorscheme should be applied to the output?
 # Options are available here:
 # https://github.com/alecthomas/chroma/tree/master/styles
-style: monokai
+# style: monokai
 
 # Which 'chroma' "formatter" should be applied?
 # One of: "terminal", "terminal256", "terminal16m"
-formatter: terminal16m
+formatter: terminal
 
 # Through which pager should output be piped? (Unset this key for no pager.)
-pager: less -FRX
+pager: more
+# pager: less -FRX # <- recommended where available
 
 # The paths at which cheatsheets are available. Tags associated with a cheatpath
 # are automatically attached to all cheatsheets residing on that path.

@@ -12,11 +12,12 @@
 Options:
 --init Write a default config file to stdout
+-a --all Search among all cheatpaths
 -c --colorize Colorize output
 -d --directories List cheatsheet directories
 -e --edit=<cheatsheet> Edit <cheatsheet>
 -l --list List cheatsheets
--p --path=<name> Return only sheets found on path <name>
+-p --path=<name> Return only sheets found on cheatpath <name>
 -r --regex Treat search <phrase> as a regex
 -s --search=<phrase> Search cheatsheets for <phrase>
 -t --tag=<tag> Return only sheets matching <tag>

@@ -1,21 +1,22 @@
 ---
 # The editor to use with 'cheat -e <sheet>'. Defaults to $EDITOR or $VISUAL.
-editor: vim
+# editor: vim
 
 # Should 'cheat' always colorize output?
-colorize: true
+colorize: false
 
 # Which 'chroma' colorscheme should be applied to the output?
 # Options are available here:
 # https://github.com/alecthomas/chroma/tree/master/styles
-style: monokai
+# style: monokai
 
 # Which 'chroma' "formatter" should be applied?
 # One of: "terminal", "terminal256", "terminal16m"
-formatter: terminal16m
+formatter: terminal
 
 # Through which pager should output be piped? (Unset this key for no pager.)
-pager: less -FRX
+pager: more
+# pager: less -FRX # <- recommended where available
 
 # The paths at which cheatsheets are available. Tags associated with a cheatpath
 # are automatically attached to all cheatsheets residing on that path.

@@ -1,4 +1,4 @@
-.\" Automatically generated by Pandoc 1.17.2
+.\" Automatically generated by Pandoc 2.2.1
 .\"
 .TH "CHEAT" "1" "" "" "General Commands Manual"
 .hy
@@ -17,62 +17,62 @@ commands that they use frequently, but not frequently enough to
 remember.
 .SH OPTIONS
 .TP
-.B \-\-init
+.B \[en]init
 Print a config file to stdout.
 .RS
 .RE
 .TP
-.B \-c, \-\-colorize
+.B \-c, \[en]colorize
 Colorize output.
 .RS
 .RE
 .TP
-.B \-d, \-\-directories
+.B \-d, \[en]directories
 List cheatsheet directories.
 .RS
 .RE
 .TP
-.B \-e, \-\-edit=\f[I]CHEATSHEET\f[]
+.B \-e, \[en]edit=\f[I]CHEATSHEET\f[]
 Open \f[I]CHEATSHEET\f[] for editing.
 .RS
 .RE
 .TP
-.B \-l, \-\-list
+.B \-l, \[en]list
 List available cheatsheets.
 .RS
 .RE
 .TP
-.B \-p, \-\-path=\f[I]PATH\f[]
+.B \-p, \[en]path=\f[I]PATH\f[]
 Filter only to sheets found on path \f[I]PATH\f[].
 .RS
 .RE
 .TP
-.B \-r, \-\-regex
+.B \-r, \[en]regex
 Treat search \f[I]PHRASE\f[] as a regular expression.
 .RS
 .RE
 .TP
-.B \-s, \-\-search=\f[I]PHRASE\f[]
+.B \-s, \[en]search=\f[I]PHRASE\f[]
 Search cheatsheets for \f[I]PHRASE\f[].
 .RS
 .RE
 .TP
-.B \-t, \-\-tag=\f[I]TAG\f[]
+.B \-t, \[en]tag=\f[I]TAG\f[]
 Filter only to sheets tagged with \f[I]TAG\f[].
 .RS
 .RE
 .TP
-.B \-T, \-\-tags
+.B \-T, \[en]tags
 List all tags in use.
 .RS
 .RE
 .TP
-.B \-v, \-\-version
+.B \-v, \[en]version
 Print the version number.
 .RS
 .RE
 .TP
-.B \-\-rm=\f[I]CHEATSHEET\f[]
+.B \[en]rm=\f[I]CHEATSHEET\f[]
 Remove (deletes) \f[I]CHEATSHEET\f[].
 .RS
 .RE
@@ -88,7 +88,7 @@ cheat \-e \f[I]foo\f[]
 .RS
 .RE
 .TP
-.B To edit (or create) the foo/bar cheatsheet on the \[aq]work\[aq] cheatpath:
+.B To edit (or create) the foo/bar cheatsheet on the `work' cheatpath:
 cheat \-p \f[I]work\f[] \-e \f[I]foo/bar\f[]
 .RS
 .RE
@@ -103,7 +103,7 @@ cheat \-l
 .RS
 .RE
 .TP
-.B To list all cheatsheets whose titles match \[aq]apt\[aq]:
+.B To list all cheatsheets whose titles match `apt':
 cheat \-l \f[I]apt\f[]
 .RS
 .RE
@@ -113,23 +113,23 @@ cheat \-T
 .RS
 .RE
 .TP
-.B To list available cheatsheets that are tagged as \[aq]personal\[aq]:
+.B To list available cheatsheets that are tagged as `personal':
 cheat \-l \-t \f[I]personal\f[]
 .RS
 .RE
 .TP
-.B To search for \[aq]ssh\[aq] among all cheatsheets, and colorize matches:
+.B To search for `ssh' among all cheatsheets, and colorize matches:
 cheat \-c \-s \f[I]ssh\f[]
 .RS
 .RE
 .TP
 .B To search (by regex) for cheatsheets that contain an IP address:
-cheat \-c \-r \-s \f[I]\[aq](?:[0\-9]{1,3}.){3}[0\-9]{1,3}\[aq]\f[]
+cheat \-c \-r \-s \f[I]`(?:[0\-9]{1,3}.){3}[0\-9]{1,3}'\f[]
 .RS
 .RE
 .TP
 .B To remove (delete) the foo/bar cheatsheet:
-cheat \-\-rm \f[I]foo/bar\f[]
+cheat \[en]rm \f[I]foo/bar\f[]
 .RS
 .RE
 .SH FILES
@@ -159,15 +159,15 @@ depending upon your platform:
 \f[B]cheat\f[] will search in the order specified above.
 The first \f[I]conf.yaml\f[] encountered will be respected.
 .PP
-If \f[B]cheat\f[] cannot locate a config file, it will ask if you\[aq]d
-like to generate one automatically.
+If \f[B]cheat\f[] cannot locate a config file, it will ask if you'd like
+to generate one automatically.
 Alternatively, you may also generate a config file manually by running
-\f[B]cheat \-\-init\f[] and saving its output to the appropriate
+\f[B]cheat \[en]init\f[] and saving its output to the appropriate
 location for your platform.
 .SS Cheatpaths
 .PP
-\f[B]cheat\f[] reads its cheatsheets from "cheatpaths", which are the
-directories in which cheatsheets are stored.
+\f[B]cheat\f[] reads its cheatsheets from \[lq]cheatpaths\[rq], which
+are the directories in which cheatsheets are stored.
 Cheatpaths may be configured in \f[I]conf.yaml\f[], and viewed via
 \f[B]cheat \-d\f[].
 .PP

go.mod

@@ -3,15 +3,15 @@ module github.com/cheat/cheat
 go 1.14
 
 require (
-github.com/alecthomas/chroma v0.8.0
+github.com/alecthomas/chroma v0.9.1
 github.com/davecgh/go-spew v1.1.1
 github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815
 github.com/kr/text v0.2.0 // indirect
-github.com/mattn/go-isatty v0.0.12
+github.com/mattn/go-isatty v0.0.14
 github.com/mitchellh/go-homedir v1.1.0
 github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
 github.com/sergi/go-diff v1.1.0 // indirect
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
 gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0
-gopkg.in/yaml.v2 v2.3.0
+gopkg.in/yaml.v2 v2.4.0
 )

go.sum

@@ -1,7 +1,7 @@
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
-github.com/alecthomas/chroma v0.8.0 h1:HS+HE97sgcqjQGu5uVr8jIE55Mmh5UeQ7kckAhHg2pY=
+github.com/alecthomas/chroma v0.9.1 h1:cBmvQqRImzR5aWqdMxYZByND4S7BCS/g0svZb28h0Dc=
-github.com/alecthomas/chroma v0.8.0/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
+github.com/alecthomas/chroma v0.9.1/go.mod h1:eMuEnpA18XbG/WhOWtCzJHS7WqEtDAI+HxdwoW0nVSk=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
 github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
@@ -13,45 +13,40 @@ github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
+github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
-github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
+github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
 github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
 github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
-github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
-github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
 github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
+github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
 github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
-github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
 github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
 github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
 github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg=
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
 golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
@@ -60,5 +55,5 @@ gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0 h1:POO/ycCATvegFmVuPpQzZFJ+p
 gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
-gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=

@@ -0,0 +1,22 @@
package config
import (
"testing"
)
// TestColor asserts that colorization rules are properly respected
func TestColor(t *testing.T) {
// mock a config
conf := Config{}
opts := map[string]interface{}{"--colorize": false}
if conf.Color(opts) {
t.Errorf("failed to respect --colorize (false)")
}
opts = map[string]interface{}{"--colorize": true}
if !conf.Color(opts) {
t.Errorf("failed to respect --colorize (true)")
}
}

@@ -4,7 +4,9 @@ import (
 "fmt"
 "io/ioutil"
 "os"
+"os/exec"
 "path/filepath"
+"runtime"
 "strings"
 
 cp "github.com/cheat/cheat/internal/cheatpath"
@@ -98,8 +100,22 @@ func New(opts map[string]interface{}, confPath string, resolve bool) (Config, er
 conf.Editor = os.Getenv("VISUAL")
 } else if os.Getenv("EDITOR") != "" {
 conf.Editor = os.Getenv("EDITOR")
+} else if runtime.GOOS == "windows" {
+conf.Editor = "notepad"
 } else {
-return Config{}, fmt.Errorf("no editor set")
+// try to fall back to `nano`
+path, err := exec.LookPath("nano")
+if err != nil {
+return Config{}, fmt.Errorf("failed to locate nano: %s", err)
+}
+
+// use `nano` if we found it
+if path != "" {
+conf.Editor = "nano"
+
+// otherwise, give up
+} else {
+return Config{}, fmt.Errorf("no editor set")
+}
 }
 }
@@ -110,12 +126,13 @@ func New(opts map[string]interface{}, confPath string, resolve bool) (Config, er
 // if a chroma formatter was not provided, set a default
 if conf.Formatter == "" {
-conf.Formatter = "terminal16m"
+conf.Formatter = "terminal"
 }
 
-// if a pager was not provided, set a default
-if strings.TrimSpace(conf.Pager) == "" {
-conf.Pager = ""
+// attempt to fall back to `PAGER` if a pager is not specified in configs
+conf.Pager = strings.TrimSpace(conf.Pager)
+if conf.Pager == "" && os.Getenv("PAGER") != "" {
+conf.Pager = os.Getenv("PAGER")
 }
 
 return conf, nil

@@ -39,17 +39,17 @@ func TestConfigSuccessful(t *testing.T) {
 // assert that the cheatpaths are correct
 want := []cheatpath.Cheatpath{
 cheatpath.Cheatpath{
-Path: filepath.Join(home, ".dotfiles/cheat/community"),
+Path: filepath.Join(home, ".dotfiles", "cheat", "community"),
 ReadOnly: true,
 Tags: []string{"community"},
 },
 cheatpath.Cheatpath{
-Path: filepath.Join(home, ".dotfiles/cheat/work"),
+Path: filepath.Join(home, ".dotfiles", "cheat", "work"),
 ReadOnly: false,
 Tags: []string{"work"},
 },
 cheatpath.Cheatpath{
-Path: filepath.Join(home, ".dotfiles/cheat/personal"),
+Path: filepath.Join(home, ".dotfiles", "cheat", "personal"),
 ReadOnly: false,
 Tags: []string{"personal"},
 },
@@ -85,8 +85,8 @@ func TestEmptyEditor(t *testing.T) {
 // initialize a config
 conf, err := New(map[string]interface{}{}, mock.Path("conf/empty.yml"), false)
-if err == nil {
-t.Errorf("failed to return an error on empty editor")
+if err != nil {
+t.Errorf("failed to initialize test: %v", err)
 }
 
 // set editor, and assert that it is respected

View File

@ -0,0 +1,38 @@
package config
import (
"io/ioutil"
"os"
"testing"
)
// TestInit asserts that configs are properly initialized
func TestInit(t *testing.T) {
// initialize a temporary config file
confFile, err := ioutil.TempFile("", "cheat-test")
if err != nil {
t.Errorf("failed to create temp file: %v", err)
}
// clean up the temp file
defer os.Remove(confFile.Name())
// initialize the config file
conf := "mock config data"
if err = Init(confFile.Name(), conf); err != nil {
t.Errorf("failed to init config file: %v", err)
}
// read back the config file contents
bytes, err := ioutil.ReadFile(confFile.Name())
if err != nil {
t.Errorf("failed to read config file: %v", err)
}
// assert that the contents were written correctly
got := string(bytes)
if got != conf {
t.Errorf("failed to write configs: want: %s, got: %s", conf, got)
}
}
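
The test exercises `config.Init`, which writes the default configs to a path. The sketch below shows one behaviour consistent with the test, assuming `Init` simply ensures the parent directory exists and writes the file; cheat's real implementation may differ, and the helper name `initConfig` is illustrative.

```go
package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
)

// initConfig ensures the parent directory exists, then writes the
// default configs to confPath.
func initConfig(confPath, configs string) error {
	if err := os.MkdirAll(filepath.Dir(confPath), 0755); err != nil {
		return fmt.Errorf("failed to create config directory: %v", err)
	}
	return ioutil.WriteFile(confPath, []byte(configs), 0644)
}

func main() {
	tmp, err := ioutil.TempFile("", "cheat-example")
	if err != nil {
		panic(err)
	}
	defer os.Remove(tmp.Name())

	if err := initConfig(tmp.Name(), "mock config data"); err != nil {
		panic(err)
	}
	fmt.Println("wrote", tmp.Name())
}
```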

View File

@ -0,0 +1,53 @@
package config
import (
"io/ioutil"
"os"
"testing"
)
// TestPathConfigNotExists asserts that `Path` identifies non-existent config
// files
func TestPathConfigNotExists(t *testing.T) {
// package (invalid) cheatpaths
paths := []string{"/cheat-test-conf-does-not-exist"}
// assert
if _, err := Path(paths); err == nil {
t.Errorf("failed to identify non-existent config file")
}
}
// TestPathConfigExists asserts that `Path` identifies existent config files
func TestPathConfigExists(t *testing.T) {
// initialize a temporary config file
confFile, err := ioutil.TempFile("", "cheat-test")
if err != nil {
t.Errorf("failed to create temp file: %v", err)
}
// clean up the temp file
defer os.Remove(confFile.Name())
// package cheatpaths
paths := []string{
"/cheat-test-conf-does-not-exist",
confFile.Name(),
}
// assert
got, err := Path(paths)
if err != nil {
t.Errorf("failed to identify config file: %v", err)
}
if got != confFile.Name() {
t.Errorf(
"failed to return config path: want: %s, got: %s",
confFile.Name(),
got,
)
}
}
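
These tests pin down the contract of `config.Path`: given a list of candidate config paths, return the first one that exists, or an error when none do. A hedged sketch of that behaviour (the helper name `firstExisting` is mine, not cheat's):

```go
package main

import (
	"fmt"
	"io/ioutil"
	"os"
)

// firstExisting returns the first candidate path that exists on disk,
// or an error when none do.
func firstExisting(paths []string) (string, error) {
	for _, p := range paths {
		if _, err := os.Stat(p); err == nil {
			return p, nil
		}
	}
	return "", fmt.Errorf("could not locate config file")
}

func main() {
	tmp, err := ioutil.TempFile("", "cheat-example")
	if err != nil {
		panic(err)
	}
	defer os.Remove(tmp.Name())

	got, err := firstExisting([]string{"/cheat-test-conf-does-not-exist", tmp.Name()})
	fmt.Println(got, err) // prints the temp file's path, <nil>
}
```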

View File

@ -2,7 +2,7 @@ package config
import ( import (
"fmt" "fmt"
"path" "path/filepath"
"github.com/mitchellh/go-homedir" "github.com/mitchellh/go-homedir"
) )
@ -28,25 +28,25 @@ func Paths(
} }
switch sys { switch sys {
case "darwin", "linux", "freebsd": case "android", "darwin", "linux", "freebsd":
paths := []string{} paths := []string{}
// don't include the `XDG_CONFIG_HOME` path if that envvar is not set // don't include the `XDG_CONFIG_HOME` path if that envvar is not set
if xdgpath, ok := envvars["XDG_CONFIG_HOME"]; ok { if xdgpath, ok := envvars["XDG_CONFIG_HOME"]; ok {
paths = append(paths, path.Join(xdgpath, "/cheat/conf.yml")) paths = append(paths, filepath.Join(xdgpath, "cheat", "conf.yml"))
} }
paths = append(paths, []string{ paths = append(paths, []string{
path.Join(home, ".config/cheat/conf.yml"), filepath.Join(home, ".config", "cheat", "conf.yml"),
path.Join(home, ".cheat/conf.yml"), filepath.Join(home, ".cheat", "conf.yml"),
"/etc/cheat/conf.yml", "/etc/cheat/conf.yml",
}...) }...)
return paths, nil return paths, nil
case "windows": case "windows":
return []string{ return []string{
path.Join(envvars["APPDATA"], "/cheat/conf.yml"), filepath.Join(envvars["APPDATA"], "cheat", "conf.yml"),
path.Join(envvars["PROGRAMDATA"], "/cheat/conf.yml"), filepath.Join(envvars["PROGRAMDATA"], "cheat", "conf.yml"),
}, nil }, nil
default: default:
return []string{}, fmt.Errorf("unsupported os: %s", sys) return []string{}, fmt.Errorf("unsupported os: %s", sys)
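
The substance of this change is swapping `path.Join` for `filepath.Join`: the former always joins with `/`, while the latter uses the platform-specific separator, which is what makes the Windows config paths come out right. A quick illustration (the APPDATA path is an example value):

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
)

func main() {
	// path.Join always uses forward slashes, regardless of OS...
	fmt.Println(path.Join(`C:\Users\me\AppData\Roaming`, "cheat", "conf.yml"))

	// ...whereas filepath.Join uses the OS-specific separator. On Windows
	// this yields a backslash-separated path; on Unix the two calls
	// produce identical output.
	fmt.Println(filepath.Join(`C:\Users\me\AppData\Roaming`, "cheat", "conf.yml"))
}
```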

View File

@ -21,6 +21,7 @@ func TestValidatePathsNix(t *testing.T) {
// specify the platforms to test // specify the platforms to test
oses := []string{ oses := []string{
"android",
"darwin", "darwin",
"freebsd", "freebsd",
"linux", "linux",

18
internal/display/faint.go Normal file
View File

@ -0,0 +1,18 @@
package display
import (
"fmt"
"github.com/cheat/cheat/internal/config"
)
// Faint returns a faint string
func Faint(str string, conf config.Config) string {
// make `str` faint only if colorization has been requested
if conf.Colorize {
return fmt.Sprintf("\033[2m%s\033[0m", str)
}
// otherwise, return the string unmodified
return str
}

View File

@ -0,0 +1,27 @@
package display
import (
"testing"
"github.com/cheat/cheat/internal/config"
)
// TestFaint asserts that Faint applies faint formatting
func TestFaint(t *testing.T) {
// case: apply colorization
conf := config.Config{Colorize: true}
want := "\033[2mfoo\033[0m"
got := Faint("foo", conf)
if want != got {
t.Errorf("failed to faint: want: %s, got: %s", want, got)
}
// case: do not apply colorization
conf.Colorize = false
want = "foo"
got = Faint("foo", conf)
if want != got {
t.Errorf("failed to faint: want: %s, got: %s", want, got)
}
}

View File

@ -0,0 +1,21 @@
package display
import (
"fmt"
"strings"
)
// Indent prepends each line of a string with a tab
func Indent(str string) string {
// trim superfluous whitespace
str = strings.TrimSpace(str)
// prepend each line with a tab character
out := ""
for _, line := range strings.Split(str, "\n") {
out += fmt.Sprintf("\t%s\n", line)
}
return out
}

View File

@ -0,0 +1,12 @@
package display
import "testing"
// TestIndent asserts that Indent prepends a tab to each line
func TestIndent(t *testing.T) {
got := Indent("foo\nbar\nbaz")
want := "\tfoo\n\tbar\n\tbaz\n"
if got != want {
t.Errorf("failed to indent: want: %s, got: %s", want, got)
}
}

View File

@ -0,0 +1,8 @@
package display
import "fmt"
// Underline returns an underlined string
func Underline(str string) string {
return fmt.Sprintf("\033[4m%s\033[0m", str)
}
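
A small, assumed example of how these two display helpers might be composed when rendering output; the pairing of an underlined title with a dimmed path is illustrative, not necessarily how cheat formats its listings.

```go
package main

import "fmt"

// faint and underline mirror the display helpers above.
func faint(s string) string     { return fmt.Sprintf("\033[2m%s\033[0m", s) }
func underline(s string) string { return fmt.Sprintf("\033[4m%s\033[0m", s) }

func main() {
	// e.g. a cheatsheet title followed by a dimmed path
	fmt.Println(underline("tar"))
	fmt.Println(faint("/home/example/.config/cheat/cheatsheets/community/tar"))
}
```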

View File

@ -0,0 +1,14 @@
package display
import (
"testing"
)
// TestUnderline asserts that Underline applies underline formatting
func TestUnderline(t *testing.T) {
want := "\033[4mfoo\033[0m"
got := Underline("foo")
if want != got {
t.Errorf("failed to underline: want: %s, got: %s", want, got)
}
}

View File

@ -9,9 +9,9 @@ import (
"github.com/cheat/cheat/internal/config" "github.com/cheat/cheat/internal/config"
) )
// Display writes output either directly to stdout, or through a pager, // Write writes output either directly to stdout, or through a pager,
// depending upon configuration. // depending upon configuration.
func Display(out string, conf config.Config) { func Write(out string, conf config.Config) {
// if no pager was configured, print the output to stdout and exit // if no pager was configured, print the output to stdout and exit
if conf.Pager == "" { if conf.Pager == "" {
fmt.Print(out) fmt.Print(out)
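
Only the rename from `Display` to `Write` is visible here, but the interesting part is what happens when a pager is configured. The sketch below shows one common way to pipe output through a pager command; it is an approximation of the idea, not cheat's exact implementation.

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

// write prints out directly when no pager is configured, and otherwise
// pipes it through the pager command.
func write(out, pager string) error {
	parts := strings.Fields(pager)
	if len(parts) == 0 {
		fmt.Print(out)
		return nil
	}

	cmd := exec.Command(parts[0], parts[1:]...)
	cmd.Stdin = strings.NewReader(out)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	return cmd.Run()
}

func main() {
	if err := write("hello from cheat\n", os.Getenv("PAGER")); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```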

View File

@ -8,8 +8,8 @@ import (
const cloneURL = "https://github.com/cheat/cheatsheets.git" const cloneURL = "https://github.com/cheat/cheatsheets.git"
// Clone clones the community cheatsheets // clone clones the community cheatsheets
func Clone(path string) error { func clone(path string) error {
// perform the clone in a shell // perform the clone in a shell
cmd := exec.Command("git", "clone", cloneURL, path) cmd := exec.Command("git", "clone", cloneURL, path)

View File

@ -23,7 +23,7 @@ func Prompt(prompt string, def bool) (bool, error) {
} }
// normalize the answer // normalize the answer
ans = strings.ToLower(strings.TrimRight(ans, "\n")) ans = strings.ToLower(strings.TrimSpace(ans))
// return the appropriate response // return the appropriate response
switch ans { switch ans {

57
internal/installer/run.go Normal file
View File

@ -0,0 +1,57 @@
package installer
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/cheat/cheat/internal/config"
)
// Run runs the installer
func Run(configs string, confpath string) error {
// determine the appropriate paths for config data and (optional) community
// cheatsheets based on the user's platform
confdir := filepath.Dir(confpath)
// create paths for community and personal cheatsheets
community := filepath.Join(confdir, "cheatsheets", "community")
personal := filepath.Join(confdir, "cheatsheets", "personal")
// template the above paths into the default configs
configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)
// prompt the user to download the community cheatsheets
yes, err := Prompt(
"Would you like to download the community cheatsheets? [Y/n]",
true,
)
if err != nil {
return fmt.Errorf("failed to prompt: %v", err)
}
// clone the community cheatsheets if so instructed
if yes {
// clone the community cheatsheets
fmt.Printf("Cloning community cheatsheets to %s.\n", community)
if err := clone(community); err != nil {
return fmt.Errorf("failed to clone cheatsheets: %v", err)
}
// also create a directory for personal cheatsheets
fmt.Printf("Cloning personal cheatsheets to %s.\n", personal)
if err := os.MkdirAll(personal, os.ModePerm); err != nil {
return fmt.Errorf("failed to create directory: %v", err)
}
}
// the config file does not exist, so we'll try to create one
if err = config.Init(confpath, configs); err != nil {
return fmt.Errorf("failed to create config file: %v", err)
}
return nil
}
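
`Run` derives the cheatsheet directories from the config file's location and templates them into the default configs before writing them out. The snippet below reproduces just that templating step; the YAML stand-in and the example config path are assumptions for illustration, not the shipped conf.yml.

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	// trimmed-down stand-in for the embedded default configs
	configs := "cheatpaths:\n" +
		"  - name: community\n" +
		"    path: COMMUNITY_PATH\n" +
		"  - name: personal\n" +
		"    path: PERSONAL_PATH\n"

	// derive cheatsheet directories from the config file's directory,
	// as Run does above
	confdir := filepath.Dir("/home/example/.config/cheat/conf.yml")
	community := filepath.Join(confdir, "cheatsheets", "community")
	personal := filepath.Join(confdir, "cheatsheets", "personal")

	// template the paths into the configs
	configs = strings.Replace(configs, "COMMUNITY_PATH", community, -1)
	configs = strings.Replace(configs, "PERSONAL_PATH", personal, -1)

	fmt.Print(configs)
}
```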

View File

@ -13,7 +13,7 @@ func Path(filename string) string {
// determine the path of this file during runtime // determine the path of this file during runtime
_, thisfile, _, _ := runtime.Caller(0) _, thisfile, _, _ := runtime.Caller(0)
// compute the config path // compute the mock path
file, err := filepath.Abs( file, err := filepath.Abs(
path.Join( path.Join(
filepath.Dir(thisfile), filepath.Dir(thisfile),
@ -22,7 +22,7 @@ func Path(filename string) string {
), ),
) )
if err != nil { if err != nil {
panic(fmt.Errorf("failed to resolve config path: %v", err)) panic(fmt.Errorf("failed to resolve mock path: %v", err))
} }
return file return file

View File

@ -0,0 +1,34 @@
package sheet
import (
"testing"
"github.com/cheat/cheat/internal/config"
)
// TestColorize asserts that syntax-highlighting is correctly applied
func TestColorize(t *testing.T) {
// mock configs
conf := config.Config{
Formatter: "terminal16m",
Style: "solarized-dark",
}
// mock a sheet
s := Sheet{
Text: "echo 'foo'",
}
// colorize the sheet text
s.Colorize(conf)
// initialize expectations
want := "echo"
want += " 'foo'"
// assert
if s.Text != want {
t.Errorf("failed to colorize sheet: want: %s, got: %s", want, s.Text)
}
}

View File

@ -4,7 +4,7 @@ import (
"fmt" "fmt"
"io" "io"
"os" "os"
"path" "path/filepath"
) )
// Copy copies a cheatsheet to a new location // Copy copies a cheatsheet to a new location
@ -22,7 +22,7 @@ func (s *Sheet) Copy(dest string) error {
defer infile.Close() defer infile.Close()
// create any necessary subdirectories // create any necessary subdirectories
dirs := path.Dir(dest) dirs := filepath.Dir(dest)
if dirs != "." { if dirs != "." {
if err := os.MkdirAll(dirs, 0755); err != nil { if err := os.MkdirAll(dirs, 0755); err != nil {
return fmt.Errorf("failed to create directory: %s, %v", dirs, err) return fmt.Errorf("failed to create directory: %s, %v", dirs, err)

View File

@ -25,7 +25,7 @@ func TestCopyFlat(t *testing.T) {
} }
// mock a cheatsheet struct // mock a cheatsheet struct
sheet, err := New("foo", src.Name(), []string{}, false) sheet, err := New("foo", "community", src.Name(), []string{}, false)
if err != nil { if err != nil {
t.Errorf("failed to init cheatsheet: %v", err) t.Errorf("failed to init cheatsheet: %v", err)
} }
@ -72,7 +72,13 @@ func TestCopyDeep(t *testing.T) {
} }
// mock a cheatsheet struct // mock a cheatsheet struct
sheet, err := New("/cheat-tests/alpha/bravo/foo", src.Name(), []string{}, false) sheet, err := New(
"/cheat-tests/alpha/bravo/foo",
"community",
src.Name(),
[]string{},
false,
)
if err != nil { if err != nil {
t.Errorf("failed to init cheatsheet: %v", err) t.Errorf("failed to init cheatsheet: %v", err)
} }

View File

@ -10,17 +10,19 @@ import (
// Sheet encapsulates sheet information // Sheet encapsulates sheet information
type Sheet struct { type Sheet struct {
Title string Title string
Path string CheatPath string
Text string Path string
Tags []string Text string
Syntax string Tags []string
ReadOnly bool Syntax string
ReadOnly bool
} }
// New initializes a new Sheet // New initializes a new Sheet
func New( func New(
title string, title string,
cheatpath string,
path string, path string,
tags []string, tags []string,
readOnly bool, readOnly bool,
@ -46,11 +48,12 @@ func New(
// initialize and return a sheet // initialize and return a sheet
return Sheet{ return Sheet{
Title: title, Title: title,
Path: path, CheatPath: cheatpath,
Text: text + "\n", Path: path,
Tags: tags, Text: text + "\n",
Syntax: fm.Syntax, Tags: tags,
ReadOnly: readOnly, Syntax: fm.Syntax,
ReadOnly: readOnly,
}, nil }, nil
} }

View File

@ -13,6 +13,7 @@ func TestSheetSuccess(t *testing.T) {
// initialize a sheet // initialize a sheet
sheet, err := New( sheet, err := New(
"foo", "foo",
"community",
mock.Path("sheet/foo"), mock.Path("sheet/foo"),
[]string{"alpha", "bravo"}, []string{"alpha", "bravo"},
false, false,
@ -61,6 +62,7 @@ func TestSheetFailure(t *testing.T) {
// initialize a sheet // initialize a sheet
_, err := New( _, err := New(
"foo", "foo",
"community",
mock.Path("/does-not-exist"), mock.Path("/does-not-exist"),
[]string{"alpha", "bravo"}, []string{"alpha", "bravo"},
false, false,
@ -69,3 +71,20 @@ func TestSheetFailure(t *testing.T) {
t.Errorf("failed to return an error on unreadable sheet") t.Errorf("failed to return an error on unreadable sheet")
} }
} }
// TestSheetFrontMatterFailure asserts that an error is returned if the sheet's
// frontmatter cannot be parsed.
func TestSheetFrontMatterFailure(t *testing.T) {
// initialize a sheet
_, err := New(
"foo",
"community",
mock.Path("sheet/bad-fm"),
[]string{"alpha", "bravo"},
false,
)
if err == nil {
t.Errorf("failed to return an error on malformed front-matter")
}
}

View File

@ -59,7 +59,13 @@ func Load(cheatpaths []cp.Cheatpath) ([]map[string]sheet.Sheet, error) {
} }
// parse the cheatsheet file into a `sheet` struct // parse the cheatsheet file into a `sheet` struct
s, err := sheet.New(title, path, cheatpath.Tags, cheatpath.ReadOnly) s, err := sheet.New(
title,
cheatpath.Name,
path,
cheatpath.Tags,
cheatpath.ReadOnly,
)
if err != nil { if err != nil {
return fmt.Errorf( return fmt.Errorf(
"failed to load sheet: %s, path: %s, err: %v", "failed to load sheet: %s, path: %s, err: %v",

View File

@ -1,3 +1,62 @@
package sheets package sheets
// TODO import (
"path"
"testing"
"github.com/cheat/cheat/internal/cheatpath"
"github.com/cheat/cheat/internal/mock"
)
// TestLoad asserts that sheets on valid cheatpaths can be loaded successfully
func TestLoad(t *testing.T) {
// mock cheatpaths
cheatpaths := []cheatpath.Cheatpath{
{
Name: "community",
Path: path.Join(mock.Path("cheatsheets"), "community"),
ReadOnly: true,
},
{
Name: "personal",
Path: path.Join(mock.Path("cheatsheets"), "personal"),
ReadOnly: false,
},
}
// load cheatsheets
sheets, err := Load(cheatpaths)
if err != nil {
t.Errorf("failed to load cheatsheets: %v", err)
}
// assert that the correct number of sheets loaded
// (sheet load details are tested in `sheet_test.go`)
want := 4
if len(sheets) != want {
t.Errorf(
"failed to load correct number of cheatsheets: want: %d, got: %d",
want,
len(sheets),
)
}
}
// TestLoadBadPath asserts that an error is returned if a cheatpath is invalid
func TestLoadBadPath(t *testing.T) {
// mock a bad cheatpath
cheatpaths := []cheatpath.Cheatpath{
{
Name: "badpath",
Path: "/cheat/test/path/does/not/exist",
ReadOnly: true,
},
}
// attempt to load the cheatpath
if _, err := Load(cheatpaths); err == nil {
t.Errorf("failed to reject invalid cheatpath")
}
}

View File

View File

@ -0,0 +1,4 @@
---
tags: [ community ]
---
This is the bar cheatsheet.

View File

@ -0,0 +1,4 @@
---
tags: [ community ]
---
This is the foo cheatsheet.

View File

@ -0,0 +1,4 @@
---
tags: [ personal ]
---
This is the bat cheatsheet.

View File

@ -0,0 +1,4 @@
---
tags: [ personal ]
---
This is the baz cheatsheet.

4
mocks/sheet/bad-fm Normal file
View File

@ -0,0 +1,4 @@
---
syntax: sh
This is malformed frontmatter.

View File

@ -40,8 +40,7 @@ _cheat() {
'(-t --tag)'{-t,--tag}'[Return only sheets matching <tag>]: :->taglist' \ '(-t --tag)'{-t,--tag}'[Return only sheets matching <tag>]: :->taglist' \
'(-T --tags)'{-T,--tags}'[List all tags in use]: :->none' \ '(-T --tags)'{-T,--tags}'[List all tags in use]: :->none' \
'(-v --version)'{-v,--version}'[Print the version number]: :->none' \ '(-v --version)'{-v,--version}'[Print the version number]: :->none' \
'(--rm)--rm[Remove (delete) <sheet>]: :->personal' \ '(--rm)--rm[Remove (delete) <sheet>]: :->personal'
'(-)*: :->full'
case $state in case $state in
(none) (none)
@ -63,4 +62,4 @@ _cheat() {
esac esac
} }
_cheat compdef _cheat cheat

46
scripts/git/cheatsheets Executable file
View File

@ -0,0 +1,46 @@
#!/bin/sh -e
pull() {
for d in `cheat -d | awk '{print $2}'`;
do
echo "Update $d"
cd "$d"
[ -d ".git" ] && git pull || :
done
echo
echo "Finished update"
}
push() {
for d in `cheat -d | grep -v "community" | awk '{print $2}'`;
do
cd "$d"
if [ -d ".git" ]
then
echo "Push modifications $d"
files=$(git ls-files -mo | tr '\n' ' ')
git add -A && git commit -m "Edited files: $files" && git push || :
else
echo "$(pwd) is not a git managed folder"
echo "First connect this to your personal git repository"
fi
done
echo
echo "Finished push operation"
}
if [ "$1" = "pull" ]; then
pull
elif [ "$1" = "push" ]; then
push
else
echo "Usage:
# pull changes
cheatsheets pull
# push changes
cheatsheets push"
fi

View File

@ -6,6 +6,8 @@ release:
brews: brews:
- -
install: bin.install "chroma" install: bin.install "chroma"
env:
- CGO_ENABLED=0
builds: builds:
- goos: - goos:
- linux - linux

View File

@ -1,12 +0,0 @@
sudo: false
language: go
go:
- "1.13.x"
script:
- go test -v ./...
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
- ./bin/golangci-lint run
- git clean -fdx .
after_success:
curl -sL https://git.io/goreleaser | bash && goreleaser

View File

@ -1,5 +1,7 @@
.PHONY: chromad upload all .PHONY: chromad upload all
VERSION ?= $(shell git describe --tags --dirty --always)
all: README.md tokentype_string.go all: README.md tokentype_string.go
README.md: lexers/*/*.go README.md: lexers/*/*.go
@ -9,10 +11,8 @@ tokentype_string.go: types.go
go generate go generate
chromad: chromad:
(cd ./cmd/chromad && go get github.com/GeertJohan/go.rice/rice@master && go install github.com/GeertJohan/go.rice/rice)
rm -f chromad rm -f chromad
(export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -o ../../chromad .) (export CGOENABLED=0 GOOS=linux ; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)
rice append -i ./cmd/chromad --exec=./chromad
upload: chromad upload: chromad
scp chromad root@swapoff.org: && \ scp chromad root@swapoff.org: && \

View File

@ -1,4 +1,4 @@
# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![Build Status](https://travis-ci.org/alecthomas/chroma.svg)](https://travis-ci.org/alecthomas/chroma) [![Gitter chat](https://badges.gitter.im/alecthomas.svg)](https://gitter.im/alecthomas/Lobby) # Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/chroma.svg)](https://circleci.com/gh/alecthomas/chroma) [![Go Report Card](https://goreportcard.com/badge/github.com/alecthomas/chroma)](https://goreportcard.com/report/github.com/alecthomas/chroma) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/)
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly. > **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
@ -36,29 +36,30 @@ translators for Pygments lexers and styles.
Prefix | Language Prefix | Language
:----: | -------- :----: | --------
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
B | Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, BNF, Brainfuck B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, BlitzBasic, BNF, Brainfuck
C | C, C#, C++, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
D | D, Dart, Diff, Django/Jinja, Docker, DTD D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan
E | EBNF, Elixir, Elm, EmacsLisp, Erlang E | EBNF, Elixir, Elm, EmacsLisp, Erlang
F | Factor, Fish, Forth, Fortran, FSharp F | Factor, Fish, Forth, Fortran, FSharp
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HTML, HTTP, Hy H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy
I | Idris, INI, Io I | Idris, Igor, INI, Io
J | J, Java, JavaScript, JSON, Julia, Jungle J | J, Java, JavaScript, JSON, Julia, Jungle
K | Kotlin K | Kotlin
L | Lighttpd configuration file, LLVM, Lua L | Lighttpd configuration file, LLVM, Lua
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
N | NASM, Newspeak, Nginx configuration file, Nim, Nix N | NASM, Newspeak, Nginx configuration file, Nim, Nix
O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
P | PacmanConf, Perl, PHP, Pig, PkgConfig, PL/pgSQL, plaintext, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3 P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python, Python 3
Q | QBasic Q | QBasic
R | R, Racket, Ragel, react, reg, reStructuredText, Rexx, Ruby, Rust R | R, Racket, Ragel, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, SML, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, SYSTEMD, systemverilog S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Swift, SYSTEMD, systemverilog
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
V | VB.net, verilog, VHDL, VimL, vue V | VB.net, verilog, VHDL, VimL, vue
W | WDTE W | WDTE
X | XML, Xorg X | XML, Xorg
Y | YAML Y | YAML, YANG
Z | Zig
_I will attempt to keep this section up to date, but an authoritative list can be _I will attempt to keep this section up to date, but an authoritative list can be
@ -183,7 +184,7 @@ following constructor options:
- `ClassPrefix(prefix)` - prefix each generated CSS class. - `ClassPrefix(prefix)` - prefix each generated CSS class.
- `TabWidth(width)` - Set the rendered tab width, in characters. - `TabWidth(width)` - Set the rendered tab width, in characters.
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). - `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
- `LinkableLineNumbers()` - Make the line numbers linkable. - `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves.
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). - `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. - `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
@ -215,7 +216,7 @@ python3 ~/Projects/chroma/_tools/pygments2chroma.py \
&& gofmt -s -w ~/Projects/chroma/lexers/*.go && gofmt -s -w ~/Projects/chroma/lexers/*.go
``` ```
See notes in [pygments-lexers.go](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
for a list of lexers, and notes on some of the issues importing them. for a list of lexers, and notes on some of the issues importing them.
<a id="markdown-formatters" name="formatters"></a> <a id="markdown-formatters" name="formatters"></a>

View File

@ -211,7 +211,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight)) fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
} }
fmt.Fprintf(w, "<span%s%s>%*d\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), lineDigits, line) fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
if highlight { if highlight {
fmt.Fprintf(w, "</span>") fmt.Fprintf(w, "</span>")
@ -237,7 +237,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
} }
if f.lineNumbers && !wrapInTable { if f.lineNumbers && !wrapInTable {
fmt.Fprintf(w, "<span%s%s>%*d</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), lineDigits, line) fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
} }
for _, token := range tokens { for _, token := range tokens {
@ -272,7 +272,19 @@ func (f *Formatter) lineIDAttribute(line int) string {
if !f.linkableLineNumbers { if !f.linkableLineNumbers {
return "" return ""
} }
return fmt.Sprintf(" id=\"%s%d\"", f.lineNumbersIDPrefix, line) return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
}
func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
title := fmt.Sprintf("%*d", lineDigits, line)
if !f.linkableLineNumbers {
return title
}
return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title)
}
func (f *Formatter) lineID(line int) string {
return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
} }
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) { func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {
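
For reference, this is roughly how the options touched here are used from the chroma HTML formatter's public API. Option names are taken from chroma's `formatters/html` package and may vary between versions, so treat this as an assumed usage sketch rather than an authoritative one.

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	// render line numbers and make each one an anchor (id prefix "L"),
	// which is the behaviour extended by the diff above
	formatter := html.New(
		html.WithLineNumbers(true),
		html.LinkableLineNumbers(true, "L"),
	)

	lexer := lexers.Get("go")
	style := styles.Get("monokai")

	iterator, err := lexer.Tokenise(nil, "package main\n")
	if err != nil {
		panic(err)
	}
	if err := formatter.Format(os.Stdout, style, iterator); err != nil {
		panic(err)
	}
}
```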

View File

@ -17,6 +17,20 @@ var c = chroma.MustParseColour
var ttyTables = map[int]*ttyTable{ var ttyTables = map[int]*ttyTable{
8: { 8: {
foreground: map[chroma.Colour]string{
c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m",
c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m",
},
background: map[chroma.Colour]string{
c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m",
c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m",
},
},
16: {
foreground: map[chroma.Colour]string{ foreground: map[chroma.Colour]string{
c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m", c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m", c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
@ -227,15 +241,11 @@ type indexedTTYFormatter struct {
func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) { func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
theme := styleToEscapeSequence(c.table, style) theme := styleToEscapeSequence(c.table, style)
for token := it(); token != chroma.EOF; token = it() { for token := it(); token != chroma.EOF; token = it() {
// TODO: Cache token lookups?
clr, ok := theme[token.Type] clr, ok := theme[token.Type]
if !ok { if !ok {
clr, ok = theme[token.Type.SubCategory()] clr, ok = theme[token.Type.SubCategory()]
if !ok { if !ok {
clr = theme[token.Type.Category()] clr = theme[token.Type.Category()]
// if !ok {
// clr = theme[chroma.InheritStyle]
// }
} }
} }
if clr != "" { if clr != "" {
@ -249,10 +259,22 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
return nil return nil
} }
// TTY is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]})
// TTY8 is an 8-colour terminal formatter. // TTY8 is an 8-colour terminal formatter.
// //
// The Lab colour space is used to map RGB values to the most appropriate index colour. // The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]}) var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]})
// TTY16 is a 16-colour terminal formatter.
//
// It uses \033[3xm for normal colours and \033[90Xm for bright colours.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]})
// TTY256 is a 256-colour terminal formatter. // TTY256 is a 256-colour terminal formatter.
// //
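
This hunk reshuffles the formatter registrations: `terminal` now maps to the 8-colour formatter, with `terminal8` and `terminal16` registered alongside the existing `terminal256` and `terminal16m`. That is what makes cheat's config change (`terminal16m` to `terminal`) a compatibility win for basic terminals. A hedged usage example via chroma's `quick` helper; the lexer, formatter, and style names are illustrative:

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := "echo 'foo'"

	// "terminal" selects the 8-colour formatter registered above; swap in
	// "terminal256" or "terminal16m" for terminals with richer colour support.
	if err := quick.Highlight(os.Stdout, src, "bash", "terminal", "solarized-dark"); err != nil {
		panic(err)
	}
}
```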

View File

@ -8,7 +8,7 @@ require (
github.com/alecthomas/kong v0.2.4 github.com/alecthomas/kong v0.2.4
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
github.com/dlclark/regexp2 v1.2.0 github.com/dlclark/regexp2 v1.4.0
github.com/mattn/go-colorable v0.1.6 github.com/mattn/go-colorable v0.1.6
github.com/mattn/go-isatty v0.0.12 github.com/mattn/go-isatty v0.0.12
github.com/pkg/errors v0.9.1 // indirect github.com/pkg/errors v0.9.1 // indirect

View File

@ -13,6 +13,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk= github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE= github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=

View File

@ -4,7 +4,7 @@ import "strings"
// An Iterator across tokens. // An Iterator across tokens.
// //
// nil will be returned at the end of the Token stream. // EOF will be returned at the end of the Token stream.
// //
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover. // If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
type Iterator func() Token type Iterator func() Token

View File

@ -6,7 +6,7 @@ import (
) )
// ABAP lexer. // ABAP lexer.
var Abap = internal.Register(MustNewLexer( var Abap = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "ABAP", Name: "ABAP",
Aliases: []string{"abap"}, Aliases: []string{"abap"},
@ -14,7 +14,11 @@ var Abap = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-abap"}, MimeTypes: []string{"text/x-abap"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
Rules{ abapRules,
))
func abapRules() Rules {
return Rules{
"common": { "common": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`^\*.*$`, CommentSingle, nil}, {`^\*.*$`, CommentSingle, nil},
@ -52,5 +56,5 @@ var Abap = internal.Register(MustNewLexer(
{`[/;:()\[\],.]`, Punctuation, nil}, {`[/;:()\[\],.]`, Punctuation, nil},
{`(!)(\w+)`, ByGroups(Operator, Name), nil}, {`(!)(\w+)`, ByGroups(Operator, Name), nil},
}, },
}, }
)) }
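
This is the first of many identical conversions in this compare: each lexer's `Rules` literal becomes a function passed to `MustNewLazyLexer`, so rule regexes are compiled on first use instead of at package init. The sketch below shows the same pattern through chroma's public API; the toy rules and names are assumptions, and the in-tree lexers additionally register themselves via the `lexers/internal` package, which is omitted here.

```go
package main

import (
	"os"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/formatters"
	"github.com/alecthomas/chroma/styles"
)

// exampleRules builds the rules on demand; with MustNewLazyLexer they
// are only compiled the first time the lexer is actually used.
func exampleRules() chroma.Rules {
	return chroma.Rules{
		"root": {
			{Pattern: `#.*$`, Type: chroma.CommentSingle},
			{Pattern: `\s+`, Type: chroma.Text},
			{Pattern: `.`, Type: chroma.Text},
		},
	}
}

func main() {
	lexer := chroma.MustNewLazyLexer(&chroma.Config{Name: "Example"}, exampleRules)

	it, err := lexer.Tokenise(nil, "# a comment\nhello\n")
	if err != nil {
		panic(err)
	}

	formatter := formatters.Get("terminal")
	if err := formatter.Format(os.Stdout, styles.Fallback, it); err != nil {
		panic(err)
	}
}
```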

View File

@ -6,14 +6,18 @@ import (
) )
// Abnf lexer. // Abnf lexer.
var Abnf = internal.Register(MustNewLexer( var Abnf = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "ABNF", Name: "ABNF",
Aliases: []string{"abnf"}, Aliases: []string{"abnf"},
Filenames: []string{"*.abnf"}, Filenames: []string{"*.abnf"},
MimeTypes: []string{"text/x-abnf"}, MimeTypes: []string{"text/x-abnf"},
}, },
Rules{ abnfRules,
))
func abnfRules() Rules {
return Rules{
"root": { "root": {
{`;.*$`, CommentSingle, nil}, {`;.*$`, CommentSingle, nil},
{`(%[si])?"[^"]*"`, Literal, nil}, {`(%[si])?"[^"]*"`, Literal, nil},
@ -34,5 +38,5 @@ var Abnf = internal.Register(MustNewLexer(
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`.`, Text, nil}, {`.`, Text, nil},
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Actionscript lexer. // Actionscript lexer.
var Actionscript = internal.Register(MustNewLexer( var Actionscript = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "ActionScript", Name: "ActionScript",
Aliases: []string{"as", "actionscript"}, Aliases: []string{"as", "actionscript"},
@ -15,7 +15,11 @@ var Actionscript = internal.Register(MustNewLexer(
NotMultiline: true, NotMultiline: true,
DotAll: true, DotAll: true,
}, },
Rules{ actionscriptRules,
))
func actionscriptRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`//.*?\n`, CommentSingle, nil}, {`//.*?\n`, CommentSingle, nil},
@ -35,5 +39,5 @@ var Actionscript = internal.Register(MustNewLexer(
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Actionscript 3 lexer. // Actionscript 3 lexer.
var Actionscript3 = internal.Register(MustNewLexer( var Actionscript3 = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "ActionScript 3", Name: "ActionScript 3",
Aliases: []string{"as3", "actionscript3"}, Aliases: []string{"as3", "actionscript3"},
@ -14,7 +14,11 @@ var Actionscript3 = internal.Register(MustNewLexer(
MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"}, MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"},
DotAll: true, DotAll: true,
}, },
Rules{ actionscript3Rules,
))
func actionscript3Rules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")}, {`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")},
@ -52,5 +56,5 @@ var Actionscript3 = internal.Register(MustNewLexer(
{`,`, Operator, Pop(1)}, {`,`, Operator, Pop(1)},
Default(Pop(1)), Default(Pop(1)),
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Ada lexer. // Ada lexer.
var Ada = internal.Register(MustNewLexer( var Ada = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "Ada", Name: "Ada",
Aliases: []string{"ada", "ada95", "ada2005"}, Aliases: []string{"ada", "ada95", "ada2005"},
@ -14,7 +14,11 @@ var Ada = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-ada"}, MimeTypes: []string{"text/x-ada"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
Rules{ adaRules,
))
func adaRules() Rules {
return Rules{
"root": { "root": {
{`[^\S\n]+`, Text, nil}, {`[^\S\n]+`, Text, nil},
{`--.*?\n`, CommentSingle, nil}, {`--.*?\n`, CommentSingle, nil},
@ -110,5 +114,5 @@ var Ada = internal.Register(MustNewLexer(
{`\)`, Punctuation, Pop(1)}, {`\)`, Punctuation, Pop(1)},
Include("root"), Include("root"),
}, },
}, }
)) }

48
vendor/github.com/alecthomas/chroma/lexers/a/al.go generated vendored Normal file
View File

@ -0,0 +1,48 @@
package a
import (
. "github.com/alecthomas/chroma" // nolint
"github.com/alecthomas/chroma/lexers/internal"
)
// Al lexer.
var Al = internal.Register(MustNewLazyLexer(
&Config{
Name: "AL",
Aliases: []string{"al"},
Filenames: []string{"*.al", "*.dal"},
MimeTypes: []string{"text/x-al"},
DotAll: true,
CaseInsensitive: true,
},
alRules,
))
// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage
func alRules() Rules {
return Rules{
"root": {
{`\s+`, TextWhitespace, nil},
{`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil},
{`(?s)//.*?\n`, CommentSingle, nil},
{`\"([^\"])*\"`, Text, nil},
{`'([^'])*'`, LiteralString, nil},
{`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil},
{`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil},
{`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil},
// Added new object types of BC 2021 wave 1 (REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension)
{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|Entitlement|PermissionSet|PermissionSetExtension))\b`, Keyword, nil},
{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil},
{`\b([<>]=|<>|<|>)\b?`, Operator, nil},
{`\b(\-|\+|\/|\*)\b`, Operator, nil},
{`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil},
{`\b(?i:(ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
{`\s*[(\.\.)&\|]\s*`, Operator, nil},
{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil},
{`[;:,]`, Punctuation, nil},
{`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
{`\w+`, Text, nil},
{`.`, Text, nil},
},
}
}

View File

@ -6,14 +6,18 @@ import (
) )
// Angular2 lexer. // Angular2 lexer.
var Angular2 = internal.Register(MustNewLexer( var Angular2 = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "Angular2", Name: "Angular2",
Aliases: []string{"ng2"}, Aliases: []string{"ng2"},
Filenames: []string{}, Filenames: []string{},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
Rules{ angular2Rules,
))
func angular2Rules() Rules {
return Rules{
"root": { "root": {
{`[^{([*#]+`, Other, nil}, {`[^{([*#]+`, Other, nil},
{`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")}, {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")},
@ -38,5 +42,5 @@ var Angular2 = internal.Register(MustNewLexer(
{`'.*?'`, LiteralString, Pop(1)}, {`'.*?'`, LiteralString, Pop(1)},
{`[^\s>]+`, LiteralString, Pop(1)}, {`[^\s>]+`, LiteralString, Pop(1)},
}, },
}, }
)) }

View File

@ -6,14 +6,18 @@ import (
) )
// ANTLR lexer. // ANTLR lexer.
var ANTLR = internal.Register(MustNewLexer( var ANTLR = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "ANTLR", Name: "ANTLR",
Aliases: []string{"antlr"}, Aliases: []string{"antlr"},
Filenames: []string{}, Filenames: []string{},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
Rules{ antlrRules,
))
func antlrRules() Rules {
return Rules{
"whitespace": { "whitespace": {
{`\s+`, TextWhitespace, nil}, {`\s+`, TextWhitespace, nil},
}, },
@ -97,5 +101,5 @@ var ANTLR = internal.Register(MustNewLexer(
{`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, {`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil},
{`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil}, {`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil},
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Apacheconf lexer. // Apacheconf lexer.
var Apacheconf = internal.Register(MustNewLexer( var Apacheconf = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "ApacheConf", Name: "ApacheConf",
Aliases: []string{"apacheconf", "aconf", "apache"}, Aliases: []string{"apacheconf", "aconf", "apache"},
@ -14,7 +14,11 @@ var Apacheconf = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-apacheconf"}, MimeTypes: []string{"text/x-apacheconf"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
Rules{ apacheconfRules,
))
func apacheconfRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`(#.*?)$`, Comment, nil}, {`(#.*?)$`, Comment, nil},
@ -34,5 +38,5 @@ var Apacheconf = internal.Register(MustNewLexer(
{`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil},
{`[^\s"\\]+`, Text, nil}, {`[^\s"\\]+`, Text, nil},
}, },
}, }
)) }

View File

@ -6,14 +6,18 @@ import (
) )
// Apl lexer. // Apl lexer.
var Apl = internal.Register(MustNewLexer( var Apl = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "APL", Name: "APL",
Aliases: []string{"apl"}, Aliases: []string{"apl"},
Filenames: []string{"*.apl"}, Filenames: []string{"*.apl"},
MimeTypes: []string{}, MimeTypes: []string{},
}, },
Rules{ aplRules,
))
func aplRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`[⍝#].*$`, CommentSingle, nil}, {`[⍝#].*$`, CommentSingle, nil},
@ -32,5 +36,5 @@ var Apl = internal.Register(MustNewLexer(
{`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil}, {`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil},
{`[{}]`, KeywordType, nil}, {`[{}]`, KeywordType, nil},
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Applescript lexer. // Applescript lexer.
var Applescript = internal.Register(MustNewLexer( var Applescript = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "AppleScript", Name: "AppleScript",
Aliases: []string{"applescript"}, Aliases: []string{"applescript"},
@ -14,7 +14,11 @@ var Applescript = internal.Register(MustNewLexer(
MimeTypes: []string{}, MimeTypes: []string{},
DotAll: true, DotAll: true,
}, },
Rules{ applescriptRules,
))
func applescriptRules() Rules {
return Rules{
"root": { "root": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`¬\n`, LiteralStringEscape, nil}, {`¬\n`, LiteralStringEscape, nil},
@ -51,5 +55,5 @@ var Applescript = internal.Register(MustNewLexer(
{`[^*(]+`, CommentMultiline, nil}, {`[^*(]+`, CommentMultiline, nil},
{`[*(]`, CommentMultiline, nil}, {`[*(]`, CommentMultiline, nil},
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Arduino lexer. // Arduino lexer.
var Arduino = internal.Register(MustNewLexer( var Arduino = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "Arduino", Name: "Arduino",
Aliases: []string{"arduino"}, Aliases: []string{"arduino"},
@ -14,7 +14,11 @@ var Arduino = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-arduino"}, MimeTypes: []string{"text/x-arduino"},
EnsureNL: true, EnsureNL: true,
}, },
Rules{ arduinoRules,
))
func arduinoRules() Rules {
return Rules{
"statements": { "statements": {
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil}, {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil},
{`char(16_t|32_t)\b`, KeywordType, nil}, {`char(16_t|32_t)\b`, KeywordType, nil},
@ -106,5 +110,5 @@ var Arduino = internal.Register(MustNewLexer(
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)}, {`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil}, {`.*?\n`, Comment, nil},
}, },
}, }
)) }

View File

@ -6,14 +6,18 @@ import (
) )
// Awk lexer. // Awk lexer.
var Awk = internal.Register(MustNewLexer( var Awk = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "Awk", Name: "Awk",
Aliases: []string{"awk", "gawk", "mawk", "nawk"}, Aliases: []string{"awk", "gawk", "mawk", "nawk"},
Filenames: []string{"*.awk"}, Filenames: []string{"*.awk"},
MimeTypes: []string{"application/x-awk"}, MimeTypes: []string{"application/x-awk"},
}, },
Rules{ awkRules,
))
func awkRules() Rules {
return Rules{
"commentsandwhitespace": { "commentsandwhitespace": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
{`#.*$`, CommentSingle, nil}, {`#.*$`, CommentSingle, nil},
@ -30,19 +34,19 @@ var Awk = internal.Register(MustNewLexer(
"root": { "root": {
{`^(?=\s|/)`, Text, Push("slashstartsregex")}, {`^(?=\s|/)`, Text, Push("slashstartsregex")},
Include("commentsandwhitespace"), Include("commentsandwhitespace"),
{`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, {`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, {`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
{`[})\].]`, Punctuation, nil}, {`[})\].]`, Punctuation, nil},
{`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")}, {`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")},
{`function\b`, KeywordDeclaration, Push("slashstartsregex")}, {`function\b`, KeywordDeclaration, Push("slashstartsregex")},
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil}, {`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil},
{`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil}, {`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil},
{`[$a-zA-Z_]\w*`, NameOther, nil}, {`[@$a-zA-Z_]\w*`, NameOther, nil},
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
{`[0-9]+`, LiteralNumberInteger, nil}, {`[0-9]+`, LiteralNumberInteger, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Ballerina lexer. // Ballerina lexer.
var Ballerina = internal.Register(MustNewLexer( var Ballerina = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "Ballerina", Name: "Ballerina",
Aliases: []string{"ballerina"}, Aliases: []string{"ballerina"},
@ -14,7 +14,11 @@ var Ballerina = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-ballerina"}, MimeTypes: []string{"text/x-ballerina"},
DotAll: true, DotAll: true,
}, },
Rules{ ballerinaRules,
))
func ballerinaRules() Rules {
return Rules{
"root": { "root": {
{`[^\S\n]+`, Text, nil}, {`[^\S\n]+`, Text, nil},
{`//.*?\n`, CommentSingle, nil}, {`//.*?\n`, CommentSingle, nil},
@ -42,5 +46,5 @@ var Ballerina = internal.Register(MustNewLexer(
"import": { "import": {
{`[\w.]+`, NameNamespace, Pop(1)}, {`[\w.]+`, NameNamespace, Pop(1)},
}, },
}, }
)) }

View File

@ -7,17 +7,27 @@ import (
"github.com/alecthomas/chroma/lexers/internal" "github.com/alecthomas/chroma/lexers/internal"
) )
// TODO(moorereason): can this be factored away?
var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`) var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)
// Bash lexer. // Bash lexer.
var Bash = internal.Register(MustNewLexer( var Bash = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "Bash", Name: "Bash",
Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"}, Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"},
Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"}, Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
MimeTypes: []string{"application/x-sh", "application/x-shellscript"}, MimeTypes: []string{"application/x-sh", "application/x-shellscript"},
}, },
Rules{ bashRules,
).SetAnalyser(func(text string) float32 {
if bashAnalyserRe.FindString(text) != "" {
return 1.0
}
return 0.0
}))
func bashRules() Rules {
return Rules{
"root": { "root": {
Include("basic"), Include("basic"),
{"`", LiteralStringBacktick, Push("backticks")}, {"`", LiteralStringBacktick, Push("backticks")},
@ -86,10 +96,5 @@ var Bash = internal.Register(MustNewLexer(
{"`", LiteralStringBacktick, Pop(1)}, {"`", LiteralStringBacktick, Pop(1)},
Include("root"), Include("root"),
}, },
},
).SetAnalyser(func(text string) float32 {
if bashAnalyserRe.FindString(text) != "" {
return 1.0
} }
return 0.0 }
}))

View File

@ -6,7 +6,7 @@ import (
) )
// Batchfile lexer. // Batchfile lexer.
var Batchfile = internal.Register(MustNewLexer( var Batchfile = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "Batchfile", Name: "Batchfile",
Aliases: []string{"bat", "batch", "dosbatch", "winbatch"}, Aliases: []string{"bat", "batch", "dosbatch", "winbatch"},
@ -14,7 +14,11 @@ var Batchfile = internal.Register(MustNewLexer(
MimeTypes: []string{"application/x-dos-batch"}, MimeTypes: []string{"application/x-dos-batch"},
CaseInsensitive: true, CaseInsensitive: true,
}, },
Rules{ batchfileRules,
))
func batchfileRules() Rules {
return Rules{
"root": { "root": {
{`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil}, {`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil},
{`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")}, {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")},
@ -190,5 +194,5 @@ var Batchfile = internal.Register(MustNewLexer(
{`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)}, {`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)},
Default(Pop(1)), Default(Pop(1)),
}, },
}, }
)) }

View File

@ -6,7 +6,7 @@ import (
) )
// Bibtex lexer. // Bibtex lexer.
var Bibtex = internal.Register(MustNewLexer( var Bibtex = internal.Register(MustNewLazyLexer(
&Config{ &Config{
Name: "BibTeX", Name: "BibTeX",
Aliases: []string{"bib", "bibtex"}, Aliases: []string{"bib", "bibtex"},
@ -15,7 +15,11 @@ var Bibtex = internal.Register(MustNewLexer(
NotMultiline: true, NotMultiline: true,
CaseInsensitive: true, CaseInsensitive: true,
}, },
Rules{ bibtexRules,
))
func bibtexRules() Rules {
return Rules{
"root": { "root": {
Include("whitespace"), Include("whitespace"),
{`@comment`, Comment, nil}, {`@comment`, Comment, nil},
@ -72,5 +76,5 @@ var Bibtex = internal.Register(MustNewLexer(
"whitespace": { "whitespace": {
{`\s+`, Text, nil}, {`\s+`, Text, nil},
}, },
}, }
)) }

View File

@@ -6,7 +6,7 @@ import (
)
// Blitzbasic lexer.
-var Blitzbasic = internal.Register(MustNewLexer(
+var Blitzbasic = internal.Register(MustNewLazyLexer(
&Config{
Name: "BlitzBasic",
Aliases: []string{"blitzbasic", "b3d", "bplus"},
@@ -14,7 +14,11 @@ var Blitzbasic = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-bb"},
CaseInsensitive: true,
},
-Rules{
+blitzbasicRules,
+))
+func blitzbasicRules() Rules {
+return Rules{
"root": {
{`[ \t]+`, Text, nil},
{`;.*?\n`, CommentSingle, nil},
@@ -44,5 +48,5 @@ var Blitzbasic = internal.Register(MustNewLexer(
{`"C?`, LiteralStringDouble, Pop(1)},
{`[^"]+`, LiteralStringDouble, nil},
},
-},
-))
+}
+}


@@ -6,19 +6,23 @@ import (
)
// Bnf lexer.
-var Bnf = internal.Register(MustNewLexer(
+var Bnf = internal.Register(MustNewLazyLexer(
&Config{
Name: "BNF",
Aliases: []string{"bnf"},
Filenames: []string{"*.bnf"},
MimeTypes: []string{"text/x-bnf"},
},
-Rules{
+bnfRules,
+))
+func bnfRules() Rules {
+return Rules{
"root": {
{`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil},
{`::=`, Operator, nil},
{`[^<>:]+`, Text, nil},
{`.`, Text, nil},
},
-},
-))
+}
+}


@@ -6,14 +6,18 @@ import (
)
// Brainfuck lexer.
-var Brainfuck = internal.Register(MustNewLexer(
+var Brainfuck = internal.Register(MustNewLazyLexer(
&Config{
Name: "Brainfuck",
Aliases: []string{"brainfuck", "bf"},
Filenames: []string{"*.bf", "*.b"},
MimeTypes: []string{"application/x-brainfuck"},
},
-Rules{
+brainfuckRules,
+))
+func brainfuckRules() Rules {
+return Rules{
"common": {
{`[.,]+`, NameTag, nil},
{`[+-]+`, NameBuiltin, nil},
@@ -30,5 +34,5 @@ var Brainfuck = internal.Register(MustNewLexer(
{`\]`, Keyword, Pop(1)},
Include("common"),
},
-},
-))
+}
+}


@@ -6,14 +6,19 @@ import (
)
// C lexer.
-var C = internal.Register(MustNewLexer(
+var C = internal.Register(MustNewLazyLexer(
&Config{
Name: "C",
Aliases: []string{"c"},
Filenames: []string{"*.c", "*.h", "*.idc"},
MimeTypes: []string{"text/x-chdr", "text/x-csrc"},
+EnsureNL: true,
},
-Rules{
+cRules,
+))
+func cRules() Rules {
+return Rules{
"whitespace": {
{`^#if\s+0`, CommentPreproc, Push("if0")},
{`^#`, CommentPreproc, Push("macro")},
@@ -87,5 +92,5 @@ var C = internal.Register(MustNewLexer(
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil},
},
-},
-))
+}
+}
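
Besides the lazy constructor, the C lexer also gains EnsureNL: true, which in Chroma ensures the input ends with a newline before tokenising, so that \n-terminated rules still match on the last line. The following is a small, hypothetical driver (not part of this diff) showing how a consumer might resolve one of these registered lexers by name and tokenise a snippet that has no trailing newline.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// The files in this diff register themselves via internal.Register, which is
	// what makes them resolvable through the lexers package.
	lexer := lexers.Get("c")
	if lexer == nil {
		lexer = lexers.Fallback
	}

	// No trailing newline here: with EnsureNL set on the C lexer's Config, the
	// input is still newline-terminated before the line-oriented rules run.
	it, err := lexer.Tokenise(nil, "int x = 42; // answer")
	if err != nil {
		panic(err)
	}
	for tok := it(); tok != chroma.EOF; tok = it() {
		fmt.Printf("%-25s %q\n", tok.Type, tok.Value)
	}
}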


@@ -6,143 +6,149 @@ import (
)
// caddyfileCommon are the rules common to both of the lexer variants
-var caddyfileCommon = Rules{
+func caddyfileCommonRules() Rules {
+return Rules{
"site_block_common": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
Include("base"),
},
"not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("arguments")},
{`\s+`, Text, nil},
},
"deep_not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
{`\s+`, Text, nil},
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
+}
}
// Caddyfile lexer.
-var Caddyfile = internal.Register(MustNewLexer(
+var Caddyfile = internal.Register(MustNewLazyLexer(
&Config{
Name: "Caddyfile",
Aliases: []string{"caddyfile", "caddy"},
Filenames: []string{"Caddyfile*"},
MimeTypes: []string{},
},
-Rules{
+caddyfileRules,
+))
+func caddyfileRules() Rules {
+return Rules{
"root": {
Include("comments"),
// Global options block
@@ -186,21 +192,25 @@ var Caddyfile = internal.Register(MustNewLexer(
{`\}`, Punctuation, Pop(2)},
Include("site_block_common"),
},
-}.Merge(caddyfileCommon),
-))
+}.Merge(caddyfileCommonRules())
+}
// Caddyfile directive-only lexer.
-var CaddyfileDirectives = internal.Register(MustNewLexer(
+var CaddyfileDirectives = internal.Register(MustNewLazyLexer(
&Config{
Name: "Caddyfile Directives",
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
Filenames: []string{},
MimeTypes: []string{},
},
-Rules{
+caddyfileDirectivesRules,
+))
+func caddyfileDirectivesRules() Rules {
+return Rules{
// Same as "site_block" in Caddyfile
"root": {
Include("site_block_common"),
},
-}.Merge(caddyfileCommon),
-))
+}.Merge(caddyfileCommonRules())
+}
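
The Caddyfile file shows the one wrinkle in the conversion: two lexer variants share a set of states, so the shared caddyfileCommon Rules value becomes the caddyfileCommonRules() function and each variant's rules callback merges it in via Rules.Merge. A minimal sketch of that shape follows, with hypothetical names (commonRules, Full) that are not part of the vendored code.

package main

import (
	"fmt"

	. "github.com/alecthomas/chroma" // dot-imported, as in the vendored lexer files
)

// Shared states, built on demand; the analogue of caddyfileCommonRules().
func commonRules() Rules {
	return Rules{
		"shared": {
			{`#.*\n`, CommentSingle, nil},
			{`\S+`, Text, nil},
			{`\s+`, Text, nil},
		},
	}
}

// Each variant contributes its own states and merges in the shared ones.
var Full = MustNewLazyLexer(
	&Config{Name: "Full"},
	func() Rules {
		return Rules{
			"root": {
				Include("shared"),
			},
		}.Merge(commonRules())
	},
)

func main() {
	it, err := Full.Tokenise(nil, "# a comment\nvalue\n")
	if err != nil {
		panic(err)
	}
	for tok := it(); tok != EOF; tok = it() {
		fmt.Printf("%-15s %q\n", tok.Type, tok.Value)
	}
}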


@@ -6,14 +6,18 @@ import (
)
// Cap'N'Proto Proto lexer.
-var CapNProto = internal.Register(MustNewLexer(
+var CapNProto = internal.Register(MustNewLazyLexer(
&Config{
Name: "Cap'n Proto",
Aliases: []string{"capnp"},
Filenames: []string{"*.capnp"},
MimeTypes: []string{},
},
-Rules{
+capNProtoRules,
+))
+func capNProtoRules() Rules {
+return Rules{
"root": {
{`#.*?$`, CommentSingle, nil},
{`@[0-9a-zA-Z]*`, NameDecorator, nil},
@@ -57,5 +61,5 @@ var CapNProto = internal.Register(MustNewLexer(
{`[])]`, NameAttribute, Pop(1)},
Default(Pop(1)),
},
-},
-))
+}
+}


@@ -6,7 +6,7 @@ import (
)
// Ceylon lexer.
-var Ceylon = internal.Register(MustNewLexer(
+var Ceylon = internal.Register(MustNewLazyLexer(
&Config{
Name: "Ceylon",
Aliases: []string{"ceylon"},
@@ -14,7 +14,11 @@ var Ceylon = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-ceylon"},
DotAll: true,
},
-Rules{
+ceylonRules,
+))
+func ceylonRules() Rules {
+return Rules{
"root": {
{`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
{`[^\S\n]+`, Text, nil},
@@ -59,5 +63,5 @@ var Ceylon = internal.Register(MustNewLexer(
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
-},
-))
+}
+}


@@ -6,14 +6,18 @@ import (
)
// Cfengine3 lexer.
-var Cfengine3 = internal.Register(MustNewLexer(
+var Cfengine3 = internal.Register(MustNewLazyLexer(
&Config{
Name: "CFEngine3",
Aliases: []string{"cfengine3", "cf3"},
Filenames: []string{"*.cf"},
MimeTypes: []string{},
},
-Rules{
+cfengine3Rules,
+))
+func cfengine3Rules() Rules {
+return Rules{
"root": {
{`#.*?\n`, Comment, nil},
{`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil},
@@ -52,5 +56,5 @@ var Cfengine3 = internal.Register(MustNewLexer(
{`\w+`, NameVariable, nil},
{`\s+`, Text, nil},
},
-},
-))
+}
+}


@@ -6,7 +6,7 @@ import (
)
// Chaiscript lexer.
-var Chaiscript = internal.Register(MustNewLexer(
+var Chaiscript = internal.Register(MustNewLazyLexer(
&Config{
Name: "ChaiScript",
Aliases: []string{"chai", "chaiscript"},
@@ -14,7 +14,11 @@ var Chaiscript = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"},
DotAll: true,
},
-Rules{
+chaiscriptRules,
+))
+func chaiscriptRules() Rules {
+return Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`//.*?\n`, CommentSingle, nil},
@@ -59,5 +63,5 @@ var Chaiscript = internal.Register(MustNewLexer(
{`[^\\"$]+`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
-},
-))
+}
+}


@@ -7,14 +7,18 @@ import (
)
// Cheetah lexer.
-var Cheetah = internal.Register(MustNewLexer(
+var Cheetah = internal.Register(MustNewLazyLexer(
&Config{
Name: "Cheetah",
Aliases: []string{"cheetah", "spitfire"},
Filenames: []string{"*.tmpl", "*.spt"},
MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"},
},
-Rules{
+cheetahRules,
+))
+func cheetahRules() Rules {
+return Rules{
"root": {
{`(##[^\n]*)$`, ByGroups(Comment), nil},
{`#[*](.|\n)*?[*]#`, Comment, nil},
@@ -33,5 +37,5 @@ var Cheetah = internal.Register(MustNewLexer(
`, Other, nil},
{`\s+`, Text, nil},
},
-},
-))
+}
+}


@@ -230,7 +230,7 @@ var (
)
// Common Lisp lexer.
-var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
+var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
&Config{
Name: "Common Lisp",
Aliases: []string{"common-lisp", "cl", "lisp"},
@@ -238,7 +238,19 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
MimeTypes: []string{"text/x-common-lisp"},
CaseInsensitive: true,
},
-Rules{
+commonLispRules,
+), TypeMapping{
+{NameVariable, NameFunction, clBuiltinFunctions},
+{NameVariable, Keyword, clSpecialForms},
+{NameVariable, NameBuiltin, clMacros},
+{NameVariable, Keyword, clLambdaListKeywords},
+{NameVariable, Keyword, clDeclarations},
+{NameVariable, KeywordType, clBuiltinTypes},
+{NameVariable, NameClass, clBuiltinClasses},
+}))
+func commonLispRules() Rules {
+return Rules{
"root": {
Default(Push("body")),
},
@@ -294,13 +306,5 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
{`\(`, Punctuation, Push("body")},
{`\)`, Punctuation, Pop(1)},
},
-},
-), TypeMapping{
-{NameVariable, NameFunction, clBuiltinFunctions},
-{NameVariable, Keyword, clSpecialForms},
-{NameVariable, NameBuiltin, clMacros},
-{NameVariable, Keyword, clLambdaListKeywords},
-{NameVariable, Keyword, clDeclarations},
-{NameVariable, KeywordType, clBuiltinTypes},
-{NameVariable, NameClass, clBuiltinClasses},
-}))
+}
+}
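
The Common Lisp lexer adds one more layer: the lazy lexer is wrapped in TypeRemappingLexer with a TypeMapping that retags tokens the inner lexer emits as NameVariable when their text appears in one of the word lists (clBuiltinFunctions and so on). A minimal sketch of that wrapper, using a hypothetical word list (exampleBuiltins) rather than the real Common Lisp tables:

package main

import (
	"fmt"

	. "github.com/alecthomas/chroma" // dot-imported, as in the vendored lexer files
)

// Hypothetical stand-in for word lists such as clBuiltinFunctions.
var exampleBuiltins = []string{"print", "sum"}

// The inner lexer tags every identifier as NameVariable; TypeRemappingLexer
// then retags the ones whose text appears in the word list.
var Example = TypeRemappingLexer(
	MustNewLazyLexer(
		&Config{Name: "Example"},
		func() Rules {
			return Rules{
				"root": {
					{`[a-z]+`, NameVariable, nil},
					{`\s+`, Text, nil},
				},
			}
		},
	),
	TypeMapping{
		{NameVariable, NameBuiltin, exampleBuiltins},
	},
)

func main() {
	it, err := Example.Tokenise(nil, "print total\n")
	if err != nil {
		panic(err)
	}
	for tok := it(); tok != EOF; tok = it() {
		fmt.Printf("%-15s %q\n", tok.Type, tok.Value)
	}
}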


@@ -6,14 +6,18 @@ import (
)
// Clojure lexer.
-var Clojure = internal.Register(MustNewLexer(
+var Clojure = internal.Register(MustNewLazyLexer(
&Config{
Name: "Clojure",
Aliases: []string{"clojure", "clj"},
Filenames: []string{"*.clj"},
MimeTypes: []string{"text/x-clojure", "application/x-clojure"},
},
-Rules{
+clojureRules,
+))
+func clojureRules() Rules {
+return Rules{
"root": {
{`;.*$`, CommentSingle, nil},
{`[,\s]+`, Text, nil},
@@ -34,5 +38,5 @@ var Clojure = internal.Register(MustNewLexer(
{`(\{|\})`, Punctuation, nil},
{`(\(|\))`, Punctuation, nil},
},
-},
-))
+}
+}


@@ -6,14 +6,18 @@ import (
)
// Cmake lexer.
-var Cmake = internal.Register(MustNewLexer(
+var Cmake = internal.Register(MustNewLazyLexer(
&Config{
Name: "CMake",
Aliases: []string{"cmake"},
Filenames: []string{"*.cmake", "CMakeLists.txt"},
MimeTypes: []string{"text/x-cmake"},
},
-Rules{
+cmakeRules,
+))
+func cmakeRules() Rules {
+return Rules{
"root": {
{`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")},
Include("keywords"),
@@ -40,5 +44,5 @@ var Cmake = internal.Register(MustNewLexer(
{`[ \t]+`, Text, nil},
{`#.*\n`, Comment, nil},
},
-},
-))
+}
+}


@@ -6,7 +6,7 @@ import (
)
// Cobol lexer.
-var Cobol = internal.Register(MustNewLexer(
+var Cobol = internal.Register(MustNewLazyLexer(
&Config{
Name: "COBOL",
Aliases: []string{"cobol"},
@@ -14,7 +14,11 @@ var Cobol = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-cobol"},
CaseInsensitive: true,
},
-Rules{
+cobolRules,
+))
+func cobolRules() Rules {
+return Rules{
"root": {
Include("comment"),
Include("strings"),
@@ -47,5 +51,5 @@ var Cobol = internal.Register(MustNewLexer(
{`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil},
{`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil},
},
-},
-))
+}
+}


@@ -6,7 +6,7 @@ import (
)
// Coffeescript lexer.
-var Coffeescript = internal.Register(MustNewLexer(
+var Coffeescript = internal.Register(MustNewLazyLexer(
&Config{
Name: "CoffeeScript",
Aliases: []string{"coffee-script", "coffeescript", "coffee"},
@@ -15,7 +15,11 @@ var Coffeescript = internal.Register(MustNewLexer(
NotMultiline: true,
DotAll: true,
},
-Rules{
+coffeescriptRules,
+))
+func coffeescriptRules() Rules {
+return Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`###[^#].*?###`, CommentMultiline, nil},
@@ -87,5 +91,5 @@ var Coffeescript = internal.Register(MustNewLexer(
{`#|\\.|\'|"`, LiteralString, nil},
Include("strings"),
},
-},
-))
+}
+}


@@ -6,7 +6,7 @@ import (
)
// Cfstatement lexer.
-var Cfstatement = internal.Register(MustNewLexer(
+var Cfstatement = internal.Register(MustNewLazyLexer(
&Config{
Name: "cfstatement",
Aliases: []string{"cfs"},
@@ -15,7 +15,11 @@ var Cfstatement = internal.Register(MustNewLexer(
NotMultiline: true,
CaseInsensitive: true,
},
-Rules{
+cfstatementRules,
+))
+func cfstatementRules() Rules {
+return Rules{
"root": {
{`//.*?\n`, CommentSingle, nil},
{`/\*(?:.|\n)*?\*/`, CommentMultiline, nil},
@@ -44,5 +48,5 @@ var Cfstatement = internal.Register(MustNewLexer(
{`#`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
-},
-))
+}
+}


@@ -6,14 +6,18 @@ import (
)
// Coq lexer.
-var Coq = internal.Register(MustNewLexer(
+var Coq = internal.Register(MustNewLazyLexer(
&Config{
Name: "Coq",
Aliases: []string{"coq"},
Filenames: []string{"*.v"},
MimeTypes: []string{"text/x-coq"},
},
-Rules{
+coqRules,
+))
+func coqRules() Rules {
+return Rules{
"root": {
{`\s+`, Text, nil},
{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil},
@@ -59,5 +63,5 @@ var Coq = internal.Register(MustNewLexer(
{`[a-z][a-z0-9_\']*`, Name, Pop(1)},
Default(Pop(1)),
},
-},
-))
+}
+}

Some files were not shown because too many files have changed in this diff.