	Detect markdown line width, resolve relative URLs (#332)
~~this is semi-blocked by https://github.com/charmbracelet/glamour/pull/96, but behaviour isn't really worse than the previous behaviour (most links work, some are still broken)~~

#### testcase for link resolver

```
tea pr 332
tea checkout 332 && make install && tea pr 332
```

- [rel](./332)
- [abs](/gitea/tea/pulls/332)
- [full](https://gitea.com/gitea/tea/pulls/332)

Co-authored-by: Norwin Roosen <git@nroo.de>
Co-authored-by: 6543 <6543@obermui.de>
Reviewed-on: https://gitea.com/gitea/tea/pulls/332
Reviewed-by: 6543 <6543@obermui.de>
Reviewed-by: Andrew Thornton <art27@cantab.net>
Co-authored-by: Norwin <noerw@noreply.gitea.io>
Co-committed-by: Norwin <noerw@noreply.gitea.io>
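For context (this sketch is not part of the commit), the change builds a glamour renderer with an auto-detected style, a base URL for resolving relative links, and a word-wrap width. A minimal standalone Go sketch of those options, with the base URL and sample markdown chosen purely for illustration:

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/glamour"
)

func main() {
	// Relative links such as ./332 or /gitea/tea/pulls/332 are resolved
	// against the configured base URL when the markdown is rendered.
	r, err := glamour.NewTermRenderer(
		glamour.WithAutoStyle(),                             // pick a light/dark style automatically
		glamour.WithBaseURL("https://gitea.com/gitea/tea/"), // illustrative base URL
		glamour.WithWordWrap(80),                            // wrap long lines at 80 columns
	)
	if err != nil {
		panic(err)
	}

	out, err := r.Render("- [rel](./332)\n- [abs](/gitea/tea/pulls/332)\n")
	if err != nil {
		panic(err)
	}
	fmt.Print(out)
}
```

At the time of this change, `WithBaseURL` came from the glamour fork pinned via the `replace` directive in go.mod below, pending the upstream pull request.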
		
							
								
								
									
go.mod (2 changed lines)
							| @@ -34,3 +34,5 @@ require ( | ||||
| 	golang.org/x/tools v0.1.0 // indirect | ||||
| 	gopkg.in/yaml.v2 v2.4.0 | ||||
| ) | ||||
|  | ||||
| replace github.com/charmbracelet/glamour => github.com/noerw/glamour v0.2.1-0.20210305125354-f0a29f1de0c2 | ||||
|   | ||||
							
								
								
									
go.sum (30 changed lines)
							| @@ -18,8 +18,8 @@ github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBb | ||||
| github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= | ||||
| github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U= | ||||
| github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= | ||||
| github.com/alecthomas/chroma v0.7.3 h1:NfdAERMy+esYQs8OXk0I868/qDxxCEo7FMz1WIqMAeI= | ||||
| github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM= | ||||
| github.com/alecthomas/chroma v0.8.1 h1:ym20sbvyC6RXz45u4qDglcgr8E313oPROshcuCHqiEE= | ||||
| github.com/alecthomas/chroma v0.8.1/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM= | ||||
| github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo= | ||||
| github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0= | ||||
| github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE= | ||||
| @@ -31,8 +31,10 @@ github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e h1:OjdSMCht0ZVX7 | ||||
| github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw= | ||||
| github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= | ||||
| github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= | ||||
| github.com/charmbracelet/glamour v0.2.0 h1:mTgaiNiumpqTZp3qVM6DH9UB0NlbY17wejoMf1kM8Pg= | ||||
| github.com/charmbracelet/glamour v0.2.0/go.mod h1:UA27Kwj3QHialP74iU6C+Gpc8Y7IOAKupeKMLLBURWM= | ||||
| github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= | ||||
| github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= | ||||
| github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU= | ||||
| github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE= | ||||
| github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= | ||||
| github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= | ||||
| github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= | ||||
| @@ -60,7 +62,8 @@ github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2Jg | ||||
| github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs= | ||||
| github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= | ||||
| github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= | ||||
| github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4= | ||||
| github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= | ||||
| github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= | ||||
| github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI= | ||||
| github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= | ||||
| github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174 h1:WlZsjVhE8Af9IcZDGgJGQpNflI3+MJSBhsgT5PCtzBQ= | ||||
| @@ -101,17 +104,18 @@ github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRC | ||||
| github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= | ||||
| github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI= | ||||
| github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= | ||||
| github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s= | ||||
| github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= | ||||
| github.com/microcosm-cc/bluemonday v1.0.4 h1:p0L+CTpo/PLFdkoPcJemLXG+fpMD7pYOoDEq1axMbGg= | ||||
| github.com/microcosm-cc/bluemonday v1.0.4/go.mod h1:8iwZnFn2CDDNZ0r6UXhF4xawGvzaqzCRa1n3/lO3W2w= | ||||
| github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= | ||||
| github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= | ||||
| github.com/muesli/reflow v0.1.0 h1:oQdpLfO56lr5pgLvqD0TcjW85rDjSYSBVdiG1Ch1ddM= | ||||
| github.com/muesli/reflow v0.1.0/go.mod h1:I9bWAt7QTg/que/qmUCJBGlj7wEq8OAFBjPNjc6xK4I= | ||||
| github.com/muesli/termenv v0.6.0/go.mod h1:SohX91w6swWA4AYU+QmPx+aSgXhWO0juiyID9UZmbpA= | ||||
| github.com/muesli/reflow v0.2.0 h1:2o0UBJPHHH4fa2GCXU4Rg4DwOtWPMekCeyc5EWbAQp0= | ||||
| github.com/muesli/reflow v0.2.0/go.mod h1:qT22vjVmM9MIUeLgsVYe/Ye7eZlbv9dZjL3dVhUqLX8= | ||||
| github.com/muesli/termenv v0.7.4 h1:/pBqvU5CpkY53tU0vVn+xgs2ZTX63aH5nY+SSps5Xa8= | ||||
| github.com/muesli/termenv v0.7.4/go.mod h1:pZ7qY9l3F7e5xsAOS0zCew2tME+p7bWeBkotCEcIIcc= | ||||
| github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= | ||||
| github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= | ||||
| github.com/noerw/glamour v0.2.1-0.20210305125354-f0a29f1de0c2 h1:ACjOTGUGi7rt3JQU9GIFFs8sueFGShy6GcGjQhMmKjs= | ||||
| github.com/noerw/glamour v0.2.1-0.20210305125354-f0a29f1de0c2/go.mod h1:WIVFX8Y2VIK1Y/1qtXYL/Vvzqlcbo3VgVop9i2piPkE= | ||||
| github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA= | ||||
| github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= | ||||
| github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= | ||||
| @@ -151,9 +155,11 @@ github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0B | ||||
| github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI= | ||||
| github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0= | ||||
| github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.2.0/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM= | ||||
| github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.3.1 h1:eVwehsLsZlCJCwXyGLgg+Q4iFWE/eTIMG0e8waCmm/I= | ||||
| github.com/yuin/goldmark v1.3.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= | ||||
| github.com/yuin/goldmark-emoji v1.0.1 h1:ctuWEyzGBwiucEqxzwe0SOYDXPAucOrE9NQC18Wa1os= | ||||
| github.com/yuin/goldmark-emoji v1.0.1/go.mod h1:2w1E6FEWLcDQkoTE+7HU6QF1F6SLlNGjRIBbIZQFqkQ= | ||||
| golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= | ||||
| golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= | ||||
| golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||
|   | ||||
| @@ -13,20 +13,27 @@ import ( | ||||
|  | ||||
| // Comments renders a list of comments to stdout | ||||
| func Comments(comments []*gitea.Comment) { | ||||
| 	var baseURL string | ||||
| 	if len(comments) != 0 { | ||||
| 		baseURL = comments[0].HTMLURL | ||||
| 	} | ||||
|  | ||||
| 	var out = make([]string, len(comments)) | ||||
| 	for i, c := range comments { | ||||
| 		out[i] = formatComment(c) | ||||
| 		baseURL = comments[i].HTMLURL | ||||
| 	} | ||||
|  | ||||
| 	outputMarkdown(fmt.Sprintf( | ||||
| 		// this will become a heading by means of the first --- from a comment | ||||
| 		"Comments\n%s", | ||||
| 		strings.Join(out, "\n"), | ||||
| 	)) | ||||
| 	), baseURL) | ||||
| } | ||||
|  | ||||
| // Comment renders a comment to stdout | ||||
| func Comment(c *gitea.Comment) { | ||||
| 	outputMarkdown(formatComment(c)) | ||||
| 	outputMarkdown(formatComment(c), c.HTMLURL) | ||||
| } | ||||
|  | ||||
| func formatComment(c *gitea.Comment) string { | ||||
|   | ||||
| @@ -21,7 +21,7 @@ func IssueDetails(issue *gitea.Issue) { | ||||
| 		issue.Poster.UserName, | ||||
| 		FormatTime(issue.Created), | ||||
| 		issue.Body, | ||||
| 	)) | ||||
| 	), issue.HTMLURL) | ||||
| } | ||||
|  | ||||
| // IssuesPullsList prints a listing of issues & pulls | ||||
|   | ||||
| @@ -28,7 +28,7 @@ func LoginDetails(login *config.Login) { | ||||
| 	} | ||||
| 	in += fmt.Sprintf("\nCreated: %s", time.Unix(login.Created, 0).Format(time.RFC822)) | ||||
|  | ||||
| 	outputMarkdown(in) | ||||
| 	outputMarkdown(in, "") | ||||
| } | ||||
|  | ||||
| // LoginsList prints a listing of logins | ||||
|   | ||||
| @@ -6,15 +6,27 @@ package print | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"os" | ||||
|  | ||||
| 	"github.com/charmbracelet/glamour" | ||||
| 	"golang.org/x/crypto/ssh/terminal" | ||||
| ) | ||||
|  | ||||
| // outputMarkdown prints markdown to stdout, formatted for terminals. | ||||
| // If the input could not be parsed, it is printed unformatted, the error | ||||
| // is returned anyway. | ||||
| func outputMarkdown(markdown string) error { | ||||
| 	out, err := glamour.Render(markdown, "auto") | ||||
| func outputMarkdown(markdown string, baseURL string) error { | ||||
| 	renderer, err := glamour.NewTermRenderer( | ||||
| 		glamour.WithAutoStyle(), | ||||
| 		glamour.WithBaseURL(baseURL), | ||||
| 		glamour.WithWordWrap(getWordWrap()), | ||||
| 	) | ||||
| 	if err != nil { | ||||
| 		fmt.Printf(markdown) | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	out, err := renderer.Render(markdown) | ||||
| 	if err != nil { | ||||
| 		fmt.Printf(markdown) | ||||
| 		return err | ||||
| @@ -22,3 +34,18 @@ func outputMarkdown(markdown string) error { | ||||
| 	fmt.Print(out) | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // stolen from https://github.com/charmbracelet/glow/blob/e9d728c/main.go#L152-L165 | ||||
| func getWordWrap() int { | ||||
| 	fd := int(os.Stdout.Fd()) | ||||
| 	width := 80 | ||||
| 	if terminal.IsTerminal(fd) { | ||||
| 		if w, _, err := terminal.GetSize(fd); err == nil { | ||||
| 			width = w | ||||
| 		} | ||||
| 	} | ||||
| 	if width > 120 { | ||||
| 		width = 120 | ||||
| 	} | ||||
| 	return width | ||||
| } | ||||
|   | ||||
| @@ -76,7 +76,7 @@ func PullDetails(pr *gitea.PullRequest, reviews []*gitea.PullReview, ciStatus *g | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	outputMarkdown(out) | ||||
| 	outputMarkdown(out, pr.HTMLURL) | ||||
| } | ||||
|  | ||||
| func formatReviews(reviews []*gitea.PullReview) string { | ||||
|   | ||||
| @@ -87,7 +87,7 @@ func RepoDetails(repo *gitea.Repository, topics []string) { | ||||
| 		urls, | ||||
| 		perm, | ||||
| 		tops, | ||||
| 	)) | ||||
| 	), repo.HTMLURL) | ||||
| } | ||||
|  | ||||
| // RepoFields are the available fields to print with ReposList() | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/.golangci.yml (5 changed lines, generated, vendored)
							| @@ -20,6 +20,11 @@ linters: | ||||
|     - wsl | ||||
|     - gomnd | ||||
|     - gocognit | ||||
|     - goerr113 | ||||
|     - nolintlint | ||||
|     - testpackage | ||||
|     - godot | ||||
|     - nestif | ||||
|  | ||||
| linters-settings: | ||||
|   govet: | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/.travis.yml (2 changed lines, generated, vendored)
							| @@ -4,7 +4,7 @@ go: | ||||
|   - "1.13.x" | ||||
| script: | ||||
|   - go test -v ./... | ||||
|   - curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2 | ||||
|   - curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0 | ||||
|   - ./bin/golangci-lint run | ||||
|   - git clean -fdx . | ||||
| after_success: | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/a/awk.go (10 changed lines, generated, vendored)
							| @@ -30,14 +30,14 @@ var Awk = internal.Register(MustNewLexer( | ||||
| 		"root": { | ||||
| 			{`^(?=\s|/)`, Text, Push("slashstartsregex")}, | ||||
| 			Include("commentsandwhitespace"), | ||||
| 			{`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, | ||||
| 			{`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, | ||||
| 			{`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, | ||||
| 			{`[})\].]`, Punctuation, nil}, | ||||
| 			{`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")}, | ||||
| 			{`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")}, | ||||
| 			{`function\b`, KeywordDeclaration, Push("slashstartsregex")}, | ||||
| 			{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil}, | ||||
| 			{`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil}, | ||||
| 			{`[$a-zA-Z_]\w*`, NameOther, nil}, | ||||
| 			{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil}, | ||||
| 			{`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil}, | ||||
| 			{`[@$a-zA-Z_]\w*`, NameOther, nil}, | ||||
| 			{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, | ||||
| 			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, | ||||
| 			{`[0-9]+`, LiteralNumberInteger, nil}, | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/b/bash.go (2 changed lines, generated, vendored)
							| @@ -36,7 +36,7 @@ var Bash = internal.Register(MustNewLexer( | ||||
| 			{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil}, | ||||
| 			{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil}, | ||||
| 			{`\A#!.+\n`, CommentPreproc, nil}, | ||||
| 			{`#.*\S`, CommentSingle, nil}, | ||||
| 			{`#.*(\S|$)`, CommentSingle, nil}, | ||||
| 			{`\\[\w\W]`, LiteralStringEscape, nil}, | ||||
| 			{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil}, | ||||
| 			{`[\[\]{}()=]`, Operator, nil}, | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/c/caddyfile.go (206 changed lines, generated, vendored, new file)
							| @@ -0,0 +1,206 @@ | ||||
| package c | ||||
|  | ||||
| import ( | ||||
| 	. "github.com/alecthomas/chroma" // nolint | ||||
| 	"github.com/alecthomas/chroma/lexers/internal" | ||||
| ) | ||||
|  | ||||
| // caddyfileCommon are the rules common to both of the lexer variants | ||||
| var caddyfileCommon = Rules{ | ||||
| 	"site_block_common": { | ||||
| 		// Import keyword | ||||
| 		{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil}, | ||||
| 		// Matcher definition | ||||
| 		{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, | ||||
| 		// Matcher token stub for docs | ||||
| 		{`\[\<matcher\>\]`, NameDecorator, Push("matcher")}, | ||||
| 		// These cannot have matchers but may have things that look like | ||||
| 		// matchers in their arguments, so we just parse as a subdirective. | ||||
| 		{`try_files`, Keyword, Push("subdirective")}, | ||||
| 		// These are special, they can nest more directives | ||||
| 		{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")}, | ||||
| 		// Any other directive | ||||
| 		{`[^\s#]+`, Keyword, Push("directive")}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"matcher": { | ||||
| 		{`\{`, Punctuation, Push("block")}, | ||||
| 		// Not can be one-liner | ||||
| 		{`not`, Keyword, Push("deep_not_matcher")}, | ||||
| 		// Any other same-line matcher | ||||
| 		{`[^\s#]+`, Keyword, Push("arguments")}, | ||||
| 		// Terminators | ||||
| 		{`\n`, Text, Pop(1)}, | ||||
| 		{`\}`, Punctuation, Pop(1)}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"block": { | ||||
| 		{`\}`, Punctuation, Pop(2)}, | ||||
| 		// Not can be one-liner | ||||
| 		{`not`, Keyword, Push("not_matcher")}, | ||||
| 		// Any other subdirective | ||||
| 		{`[^\s#]+`, Keyword, Push("subdirective")}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"nested_block": { | ||||
| 		{`\}`, Punctuation, Pop(2)}, | ||||
| 		// Matcher definition | ||||
| 		{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, | ||||
| 		// Something that starts with literally < is probably a docs stub | ||||
| 		{`\<[^#]+\>`, Keyword, Push("nested_directive")}, | ||||
| 		// Any other directive | ||||
| 		{`[^\s#]+`, Keyword, Push("nested_directive")}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"not_matcher": { | ||||
| 		{`\}`, Punctuation, Pop(2)}, | ||||
| 		{`\{(?=\s)`, Punctuation, Push("block")}, | ||||
| 		{`[^\s#]+`, Keyword, Push("arguments")}, | ||||
| 		{`\s+`, Text, nil}, | ||||
| 	}, | ||||
| 	"deep_not_matcher": { | ||||
| 		{`\}`, Punctuation, Pop(2)}, | ||||
| 		{`\{(?=\s)`, Punctuation, Push("block")}, | ||||
| 		{`[^\s#]+`, Keyword, Push("deep_subdirective")}, | ||||
| 		{`\s+`, Text, nil}, | ||||
| 	}, | ||||
| 	"directive": { | ||||
| 		{`\{(?=\s)`, Punctuation, Push("block")}, | ||||
| 		Include("matcher_token"), | ||||
| 		Include("comments_pop_1"), | ||||
| 		{`\n`, Text, Pop(1)}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"nested_directive": { | ||||
| 		{`\{(?=\s)`, Punctuation, Push("nested_block")}, | ||||
| 		Include("matcher_token"), | ||||
| 		Include("comments_pop_1"), | ||||
| 		{`\n`, Text, Pop(1)}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"subdirective": { | ||||
| 		{`\{(?=\s)`, Punctuation, Push("block")}, | ||||
| 		Include("comments_pop_1"), | ||||
| 		{`\n`, Text, Pop(1)}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"arguments": { | ||||
| 		{`\{(?=\s)`, Punctuation, Push("block")}, | ||||
| 		Include("comments_pop_2"), | ||||
| 		{`\\\n`, Text, nil}, // Skip escaped newlines | ||||
| 		{`\n`, Text, Pop(2)}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"deep_subdirective": { | ||||
| 		{`\{(?=\s)`, Punctuation, Push("block")}, | ||||
| 		Include("comments_pop_3"), | ||||
| 		{`\n`, Text, Pop(3)}, | ||||
| 		Include("base"), | ||||
| 	}, | ||||
| 	"matcher_token": { | ||||
| 		{`@[^\s]+`, NameDecorator, Push("arguments")},         // Named matcher | ||||
| 		{`/[^\s]+`, NameDecorator, Push("arguments")},         // Path matcher | ||||
| 		{`\*`, NameDecorator, Push("arguments")},              // Wildcard path matcher | ||||
| 		{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs | ||||
| 	}, | ||||
| 	"comments": { | ||||
| 		{`^#.*\n`, CommentSingle, nil},   // Comment at start of line | ||||
| 		{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace | ||||
| 	}, | ||||
| 	"comments_pop_1": { | ||||
| 		{`^#.*\n`, CommentSingle, Pop(1)},   // Comment at start of line | ||||
| 		{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace | ||||
| 	}, | ||||
| 	"comments_pop_2": { | ||||
| 		{`^#.*\n`, CommentSingle, Pop(2)},   // Comment at start of line | ||||
| 		{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace | ||||
| 	}, | ||||
| 	"comments_pop_3": { | ||||
| 		{`^#.*\n`, CommentSingle, Pop(3)},   // Comment at start of line | ||||
| 		{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace | ||||
| 	}, | ||||
| 	"base": { | ||||
| 		Include("comments"), | ||||
| 		{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil}, | ||||
| 		{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil}, | ||||
| 		{`[a-z-]+/[a-z-+]+`, LiteralString, nil}, | ||||
| 		{`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, | ||||
| 		{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder | ||||
| 		{`\[(?=[^#{}$]+\])`, Punctuation, nil}, | ||||
| 		{`\]|\|`, Punctuation, nil}, | ||||
| 		{`[^\s#{}$\]]+`, LiteralString, nil}, | ||||
| 		{`/[^\s#]*`, Name, nil}, | ||||
| 		{`\s+`, Text, nil}, | ||||
| 	}, | ||||
| } | ||||
|  | ||||
| // Caddyfile lexer. | ||||
| var Caddyfile = internal.Register(MustNewLexer( | ||||
| 	&Config{ | ||||
| 		Name:      "Caddyfile", | ||||
| 		Aliases:   []string{"caddyfile", "caddy"}, | ||||
| 		Filenames: []string{"Caddyfile*"}, | ||||
| 		MimeTypes: []string{}, | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
| 			Include("comments"), | ||||
| 			// Global options block | ||||
| 			{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")}, | ||||
| 			// Snippets | ||||
| 			{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")}, | ||||
| 			// Site label | ||||
| 			{`[^#{(\s,]+`, GenericHeading, Push("label")}, | ||||
| 			// Site label with placeholder | ||||
| 			{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")}, | ||||
| 			{`\s+`, Text, nil}, | ||||
| 		}, | ||||
| 		"globals": { | ||||
| 			{`\}`, Punctuation, Pop(1)}, | ||||
| 			{`[^\s#]+`, Keyword, Push("directive")}, | ||||
| 			Include("base"), | ||||
| 		}, | ||||
| 		"snippet": { | ||||
| 			{`\}`, Punctuation, Pop(1)}, | ||||
| 			// Matcher definition | ||||
| 			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, | ||||
| 			// Any directive | ||||
| 			{`[^\s#]+`, Keyword, Push("directive")}, | ||||
| 			Include("base"), | ||||
| 		}, | ||||
| 		"label": { | ||||
| 			// Allow multiple labels, comma separated, newlines after | ||||
| 			// a comma means another label is coming | ||||
| 			{`,\s*\n?`, Text, nil}, | ||||
| 			{` `, Text, nil}, | ||||
| 			// Site label with placeholder | ||||
| 			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, | ||||
| 			// Site label | ||||
| 			{`[^#{(\s,]+`, GenericHeading, nil}, | ||||
| 			// Comment after non-block label (hack because comments end in \n) | ||||
| 			{`#.*\n`, CommentSingle, Push("site_block")}, | ||||
| 			// Note: if \n, we'll never pop out of the site_block, it's valid | ||||
| 			{`\{(?=\s)|\n`, Punctuation, Push("site_block")}, | ||||
| 		}, | ||||
| 		"site_block": { | ||||
| 			{`\}`, Punctuation, Pop(2)}, | ||||
| 			Include("site_block_common"), | ||||
| 		}, | ||||
| 	}.Merge(caddyfileCommon), | ||||
| )) | ||||
|  | ||||
| // Caddyfile directive-only lexer. | ||||
| var CaddyfileDirectives = internal.Register(MustNewLexer( | ||||
| 	&Config{ | ||||
| 		Name:      "Caddyfile Directives", | ||||
| 		Aliases:   []string{"caddyfile-directives", "caddyfile-d", "caddy-d"}, | ||||
| 		Filenames: []string{}, | ||||
| 		MimeTypes: []string{}, | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		// Same as "site_block" in Caddyfile | ||||
| 		"root": { | ||||
| 			Include("site_block_common"), | ||||
| 		}, | ||||
| 	}.Merge(caddyfileCommon), | ||||
| )) | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/circular/php.go (23 changed lines, generated, vendored)
							| @@ -1,15 +1,12 @@ | ||||
| package circular | ||||
|  | ||||
| import ( | ||||
| 	"strings" | ||||
|  | ||||
| 	. "github.com/alecthomas/chroma" // nolint | ||||
| 	"github.com/alecthomas/chroma/lexers/h" | ||||
| 	"github.com/alecthomas/chroma/lexers/internal" | ||||
| ) | ||||
|  | ||||
| // PHP lexer. | ||||
| var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer( | ||||
| // PHP lexer for pure PHP code (not embedded in HTML). | ||||
| var PHP = internal.Register(MustNewLexer( | ||||
| 	&Config{ | ||||
| 		Name:            "PHP", | ||||
| 		Aliases:         []string{"php", "php3", "php4", "php5"}, | ||||
| @@ -19,12 +16,10 @@ var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer( | ||||
| 		CaseInsensitive: true, | ||||
| 		EnsureNL:        true, | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
| 			{`<\?(php)?`, CommentPreproc, Push("php")}, | ||||
| 			{`[^<]+`, Other, nil}, | ||||
| 			{`<`, Other, nil}, | ||||
| 		}, | ||||
| 	phpCommonRules.Rename("php", "root"), | ||||
| )) | ||||
|  | ||||
| var phpCommonRules = Rules{ | ||||
| 	"php": { | ||||
| 		{`\?>`, CommentPreproc, Pop(1)}, | ||||
| 		{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil}, | ||||
| @@ -82,10 +77,4 @@ var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer( | ||||
| 		{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil}, | ||||
| 		{`[${\\]`, LiteralStringDouble, nil}, | ||||
| 	}, | ||||
| 	}, | ||||
| ).SetAnalyser(func(text string) float32 { | ||||
| 	if strings.Contains(text, "<?php") { | ||||
| 		return 0.5 | ||||
| } | ||||
| 	return 0.0 | ||||
| }))) | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go (34 changed lines, generated, vendored, new file)
							| @@ -0,0 +1,34 @@ | ||||
| package circular | ||||
|  | ||||
| import ( | ||||
| 	"strings" | ||||
|  | ||||
| 	. "github.com/alecthomas/chroma" // nolint | ||||
| 	"github.com/alecthomas/chroma/lexers/h" | ||||
| 	"github.com/alecthomas/chroma/lexers/internal" | ||||
| ) | ||||
|  | ||||
| // PHTML lexer is PHP in HTML. | ||||
| var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer( | ||||
| 	&Config{ | ||||
| 		Name:            "PHTML", | ||||
| 		Aliases:         []string{"phtml"}, | ||||
| 		Filenames:       []string{"*.phtml"}, | ||||
| 		MimeTypes:       []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"}, | ||||
| 		DotAll:          true, | ||||
| 		CaseInsensitive: true, | ||||
| 		EnsureNL:        true, | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
| 			{`<\?(php)?`, CommentPreproc, Push("php")}, | ||||
| 			{`[^<]+`, Other, nil}, | ||||
| 			{`<`, Other, nil}, | ||||
| 		}, | ||||
| 	}.Merge(phpCommonRules), | ||||
| ).SetAnalyser(func(text string) float32 { | ||||
| 	if strings.Contains(text, "<?php") { | ||||
| 		return 0.5 | ||||
| 	} | ||||
| 	return 0.0 | ||||
| }))) | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/e/elixir.go (7 changed lines, generated, vendored)
							| @@ -28,6 +28,13 @@ var Elixir = internal.Register(MustNewLexer( | ||||
| 			{`:"`, LiteralStringSymbol, Push("string_double_atom")}, | ||||
| 			{`:'`, LiteralStringSymbol, Push("string_single_atom")}, | ||||
| 			{`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, | ||||
| 			{`(fn|do|end|after|else|rescue|catch)\b`, Keyword, nil}, | ||||
| 			{`(not|and|or|when|in)\b`, OperatorWord, nil}, | ||||
| 			{`(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b`, Keyword, nil}, | ||||
| 			{`(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b`, KeywordDeclaration, nil}, | ||||
| 			{`(import|require|use|alias)\b`, KeywordNamespace, nil}, | ||||
| 			{`(nil|true|false)\b`, NameConstant, nil}, | ||||
| 			{`(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b`, NamePseudo, nil}, | ||||
| 			{`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil}, | ||||
| 			{`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil}, | ||||
| 			{`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil}, | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/g/go.go (1 changed line, generated, vendored)
							| @@ -15,6 +15,7 @@ var Go = internal.Register(MustNewLexer( | ||||
| 		Aliases:   []string{"go", "golang"}, | ||||
| 		Filenames: []string{"*.go"}, | ||||
| 		MimeTypes: []string{"text/x-gosrc"}, | ||||
| 		EnsureNL:  true, | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/h/http.go (4 changed lines, generated, vendored)
							| @@ -19,8 +19,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer( | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
| 			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")}, | ||||
| 			{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")}, | ||||
| 			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")}, | ||||
| 			{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")}, | ||||
| 		}, | ||||
| 		"headers": { | ||||
| 			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil}, | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/i/ini.go (2 changed lines, generated, vendored)
							| @@ -10,7 +10,7 @@ var Ini = internal.Register(MustNewLexer( | ||||
| 	&Config{ | ||||
| 		Name:      "INI", | ||||
| 		Aliases:   []string{"ini", "cfg", "dosini"}, | ||||
| 		Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig"}, | ||||
| 		Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"}, | ||||
| 		MimeTypes: []string{"text/x-ini", "text/inf"}, | ||||
| 	}, | ||||
| 	Rules{ | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/j/javascript.go (2 changed lines, generated, vendored)

File diff suppressed because one or more lines are too long
											
										
									
								
							
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/j/jsx.go (2 changed lines, generated, vendored)

File diff suppressed because one or more lines are too long
											
										
									
								
							
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go (59 changed lines, generated, vendored)
							| @@ -24,32 +24,71 @@ var Kotlin = internal.Register(MustNewLexer( | ||||
| 			{`//[^\n]*\n?`, CommentSingle, nil}, | ||||
| 			{`/[*].*?[*]/`, CommentMultiline, nil}, | ||||
| 			{`\n`, Text, nil}, | ||||
| 			{`::|!!|\?[:.]`, Operator, nil}, | ||||
| 			{`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil}, | ||||
| 			{`!==|!in|!is|===`, Operator, nil}, | ||||
| 			{`%=|&&|\*=|\+\+|\+=|--|-=|->|\.\.|\/=|::|<=|==|>=|!!|!=|\|\||\?[:.]`, Operator, nil}, | ||||
| 			{`[~!%^&*()+=|\[\]:;,.<>\/?-]`, Punctuation, nil}, | ||||
| 			{`[{}]`, Punctuation, nil}, | ||||
| 			{`"""[^"]*"""`, LiteralString, nil}, | ||||
| 			{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, | ||||
| 			{`"""`, LiteralString, Push("rawstring")}, | ||||
| 			{`"`, LiteralStringDouble, Push("string")}, | ||||
| 			{`(')(\\u[0-9a-fA-F]{4})(')`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralStringChar), nil}, | ||||
| 			{`'\\.'|'[^\\]'`, LiteralStringChar, nil}, | ||||
| 			{`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, nil}, | ||||
| 			{`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil}, | ||||
| 			{`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")}, | ||||
| 			{`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")}, | ||||
| 			{`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")}, | ||||
| 			{`(fun)(\s+)(<[^>]*>\s+)?`, ByGroups(Keyword, Text, Text), Push("function")}, | ||||
| 			{`(abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|false|final|finally|for|fun|get|if|import|in|infix|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|true|try|val|var|vararg|when|where|while)\b`, Keyword, nil}, | ||||
| 			{"(@?[" + kotlinIdentifier + "]*`)", Name, nil}, | ||||
| 			{`(fun)(\s+)`, ByGroups(Keyword, Text), Push("function")}, | ||||
| 			{`(abstract|actual|annotation|as|as\?|break|by|catch|class|companion|const|constructor|continue|crossinline|data|delegate|do|dynamic|else|enum|expect|external|false|field|file|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|it|lateinit|noinline|null|object|open|operator|out|override|package|param|private|property|protected|public|receiver|reified|return|sealed|set|setparam|super|suspend|tailrec|this|throw|true|try|typealias|typeof|val|var|vararg|when|where|while)\b`, Keyword, nil}, | ||||
| 			{`@[` + kotlinIdentifier + `]+`, NameDecorator, nil}, | ||||
| 			{`[` + kotlinIdentifier + `]+`, Name, nil}, | ||||
| 		}, | ||||
| 		"package": { | ||||
| 			{`\S+`, NameNamespace, Pop(1)}, | ||||
| 		}, | ||||
| 		"class": { | ||||
| 			{"(@?[" + kotlinIdentifier + "]*`)", NameClass, Pop(1)}, | ||||
| 			// \x60 is the back tick character (`) | ||||
| 			{`\x60[^\x60]+?\x60`, NameClass, Pop(1)}, | ||||
| 			{`[` + kotlinIdentifier + `]+`, NameClass, Pop(1)}, | ||||
| 		}, | ||||
| 		"property": { | ||||
| 			{"(@?[" + kotlinIdentifier + " ]*`)", NameProperty, Pop(1)}, | ||||
| 			{`\x60[^\x60]+?\x60`, NameProperty, Pop(1)}, | ||||
| 			{`[` + kotlinIdentifier + `]+`, NameProperty, Pop(1)}, | ||||
| 		}, | ||||
| 		"generics-specification": { | ||||
| 			{`<`, Punctuation, Push("generics-specification")}, // required for generics inside generics e.g. <T : List<Int> > | ||||
| 			{`>`, Punctuation, Pop(1)}, | ||||
| 			{`[,:*?]`, Punctuation, nil}, | ||||
| 			{`(in|out|reified)`, Keyword, nil}, | ||||
| 			{`\x60[^\x60]+?\x60`, NameClass, nil}, | ||||
| 			{`[` + kotlinIdentifier + `]+`, NameClass, nil}, | ||||
| 			{`\s+`, Text, nil}, | ||||
| 		}, | ||||
| 		"function": { | ||||
| 			{"(@?[" + kotlinIdentifier + " ]*`)", NameFunction, Pop(1)}, | ||||
| 			{`<`, Punctuation, Push("generics-specification")}, | ||||
| 			{`\x60[^\x60]+?\x60`, NameFunction, Pop(1)}, | ||||
| 			{`[` + kotlinIdentifier + `]+`, NameFunction, Pop(1)}, | ||||
| 			{`\s+`, Text, nil}, | ||||
| 		}, | ||||
| 		"rawstring": { | ||||
| 			// raw strings don't allow character escaping | ||||
| 			{`"""`, LiteralString, Pop(1)}, | ||||
| 			{`(?:[^$"]+|\"{1,2}[^"])+`, LiteralString, nil}, | ||||
| 			Include("string-interpol"), | ||||
| 			// remaining dollar signs are just a string | ||||
| 			{`\$`, LiteralString, nil}, | ||||
| 		}, | ||||
| 		"string": { | ||||
| 			{`\\[tbnr'"\\\$]`, LiteralStringEscape, nil}, | ||||
| 			{`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, | ||||
| 			{`"`, LiteralStringDouble, Pop(1)}, | ||||
| 			Include("string-interpol"), | ||||
| 			{`[^\n\\"$]+`, LiteralStringDouble, nil}, | ||||
| 			// remaining dollar signs are just a string | ||||
| 			{`\$`, LiteralStringDouble, nil}, | ||||
| 		}, | ||||
| 		"string-interpol": { | ||||
| 			{`\$[` + kotlinIdentifier + `]+`, LiteralStringInterpol, nil}, | ||||
| 			{`\${[^}\n]*}`, LiteralStringInterpol, nil}, | ||||
| 		}, | ||||
| 	}, | ||||
| )) | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/lexers.go (1 changed line, generated, vendored)
							| @@ -32,6 +32,7 @@ import ( | ||||
| 	_ "github.com/alecthomas/chroma/lexers/w" | ||||
| 	_ "github.com/alecthomas/chroma/lexers/x" | ||||
| 	_ "github.com/alecthomas/chroma/lexers/y" | ||||
| 	_ "github.com/alecthomas/chroma/lexers/z" | ||||
| ) | ||||
|  | ||||
| // Registry of Lexers. | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/p/pony.go (59 changed lines, generated, vendored, new file)
							| @@ -0,0 +1,59 @@ | ||||
| package p | ||||
|  | ||||
| import ( | ||||
| 	. "github.com/alecthomas/chroma" // nolint | ||||
| 	"github.com/alecthomas/chroma/lexers/internal" | ||||
| ) | ||||
|  | ||||
| // Pony lexer. | ||||
| var Pony = internal.Register(MustNewLexer( | ||||
| 	&Config{ | ||||
| 		Name:      "Pony", | ||||
| 		Aliases:   []string{"pony"}, | ||||
| 		Filenames: []string{"*.pony"}, | ||||
| 		MimeTypes: []string{}, | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
| 			{`\n`, Text, nil}, | ||||
| 			{`[^\S\n]+`, Text, nil}, | ||||
| 			{`//.*\n`, CommentSingle, nil}, | ||||
| 			{`/\*`, CommentMultiline, Push("nested_comment")}, | ||||
| 			{`"""(?:.|\n)*?"""`, LiteralStringDoc, nil}, | ||||
| 			{`"`, LiteralString, Push("string")}, | ||||
| 			{`\'.*\'`, LiteralStringChar, nil}, | ||||
| 			{`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil}, | ||||
| 			{Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil}, | ||||
| 			{`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil}, | ||||
| 			{Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil}, | ||||
| 			{`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")}, | ||||
| 			{`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")}, | ||||
| 			{Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil}, | ||||
| 			{`_?[A-Z]\w*`, NameClass, nil}, | ||||
| 			{`string\(\)`, NameOther, nil}, | ||||
| 			{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil}, | ||||
| 			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, | ||||
| 			{`\d+`, LiteralNumberInteger, nil}, | ||||
| 			{`(true|false)\b`, Keyword, nil}, | ||||
| 			{`_\d*`, Name, nil}, | ||||
| 			{`_?[a-z][\w\'_]*`, Name, nil}, | ||||
| 		}, | ||||
| 		"typename": { | ||||
| 			{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)}, | ||||
| 		}, | ||||
| 		"methodname": { | ||||
| 			{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)}, | ||||
| 		}, | ||||
| 		"nested_comment": { | ||||
| 			{`[^*/]+`, CommentMultiline, nil}, | ||||
| 			{`/\*`, CommentMultiline, Push()}, | ||||
| 			{`\*/`, CommentMultiline, Pop(1)}, | ||||
| 			{`[*/]`, CommentMultiline, nil}, | ||||
| 		}, | ||||
| 		"string": { | ||||
| 			{`"`, LiteralString, Pop(1)}, | ||||
| 			{`\\"`, LiteralString, nil}, | ||||
| 			{`[^\\"]+`, LiteralString, nil}, | ||||
| 		}, | ||||
| 	}, | ||||
| )) | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/t/toml.go (2 changed lines, generated, vendored)
							| @@ -22,7 +22,7 @@ var TOML = internal.Register(MustNewLexer( | ||||
| 			{`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil}, | ||||
| 			{`"(\\\\|\\"|[^"])*"`, StringDouble, nil}, | ||||
| 			{`'(\\\\|\\'|[^'])*'`, StringSingle, nil}, | ||||
| 			{`[.,=\[\]]`, Punctuation, nil}, | ||||
| 			{`[.,=\[\]{}]`, Punctuation, nil}, | ||||
| 			{`[^\W\d]\w*`, NameOther, nil}, | ||||
| 		}, | ||||
| 	}, | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/t/typescript.go (10 changed lines, generated, vendored)
							| @@ -38,14 +38,14 @@ var TypeScript = internal.Register(MustNewLexer( | ||||
| 			{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")}, | ||||
| 			{`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, | ||||
| 			{`[})\].]`, Punctuation, nil}, | ||||
| 			{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")}, | ||||
| 			{`(for|in|of|while|do|break|return|yield|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|keyof|asserts|is|infer|await|void|this)\b`, Keyword, Push("slashstartsregex")}, | ||||
| 			{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")}, | ||||
| 			{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil}, | ||||
| 			{`(abstract|async|boolean|class|const|debugger|enum|export|extends|from|get|global|goto|implements|import|interface|namespace|package|private|protected|public|readonly|require|set|static|super|type)\b`, KeywordReserved, nil}, | ||||
| 			{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil}, | ||||
| 			{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil}, | ||||
| 			{`(Array|Boolean|Date|Error|Function|Math|Number|Object|Packages|RegExp|String|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil}, | ||||
| 			{`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")}, | ||||
| 			{`\b(string|bool|number)\b`, KeywordType, nil}, | ||||
| 			{`\b(constructor|declare|interface|as|AS)\b`, KeywordReserved, nil}, | ||||
| 			{`\b(string|bool|number|any|never|object|symbol|unique|unknown|bigint)\b`, KeywordType, nil}, | ||||
| 			{`\b(constructor|declare|interface|as)\b`, KeywordReserved, nil}, | ||||
| 			{`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")}, | ||||
| 			{`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")}, | ||||
| 			{`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil}, | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/y/yaml.go (20 changed lines, generated, vendored)
							| @@ -15,32 +15,36 @@ var YAML = internal.Register(MustNewLexer( | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
| 			Include("whitespace"), | ||||
| 			{`^---`, Text, nil}, | ||||
| 			{`^---`, NameNamespace, nil}, | ||||
| 			{`^\.\.\.`, NameNamespace, nil}, | ||||
| 			{`[\n?]?\s*- `, Text, nil}, | ||||
| 			{`#.*$`, Comment, nil}, | ||||
| 			{`!![^\s]+`, CommentPreproc, nil}, | ||||
| 			{`&[^\s]+`, CommentPreproc, nil}, | ||||
| 			{`\*[^\s]+`, CommentPreproc, nil}, | ||||
| 			{`^%include\s+[^\n\r]+`, CommentPreproc, nil}, | ||||
| 			{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil}, | ||||
| 			Include("key"), | ||||
| 			Include("value"), | ||||
| 			{`[?:,\[\]]`, Punctuation, nil}, | ||||
| 			{`.`, Text, nil}, | ||||
| 		}, | ||||
| 		"value": { | ||||
| 			{Words(``, `\b`, "true", "false", "null"), KeywordConstant, nil}, | ||||
| 			{`([>|](?:[+-])?)(\n(^ {1,})(?:.*\n*(?:^\3 *).*)*)`, ByGroups(Punctuation, StringDoc, Whitespace), nil}, | ||||
| 			{Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null", | ||||
| 				"y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO", | ||||
| 				"on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil}, | ||||
| 			{`"(?:\\.|[^"])*"`, StringDouble, nil}, | ||||
| 			{`'(?:\\.|[^'])*'`, StringSingle, nil}, | ||||
| 			{`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil}, | ||||
| 			{`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil}, | ||||
| 			{`\b[\w]+\b`, Text, nil}, | ||||
| 			{`([^\{\}\[\]\?,\:\!\-\*&\@].*)( )+(#.*)`, ByGroups(Literal, Whitespace, Comment), nil}, | ||||
| 			{`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil}, | ||||
| 		}, | ||||
| 		"key": { | ||||
| 			{`"[^"\n].*": `, Keyword, nil}, | ||||
| 			{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, Keyword, Punctuation, Whitespace), nil}, | ||||
| 			{`([^"\n{]*)(:)( )`, ByGroups(Keyword, Punctuation, Whitespace), nil}, | ||||
| 			{`([^"\n{]*)(:)(\n)`, ByGroups(Keyword, Punctuation, Whitespace), nil}, | ||||
| 			{`"[^"\n].*": `, NameTag, nil}, | ||||
| 			{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil}, | ||||
| 			{`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil}, | ||||
| 			{`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil}, | ||||
| 		}, | ||||
| 		"whitespace": { | ||||
| 			{`\s+`, Whitespace, nil}, | ||||
|   | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/lexers/z/zig.go (54 changed lines, generated, vendored, new file)
							| @@ -0,0 +1,54 @@ | ||||
| package z | ||||
|  | ||||
| import ( | ||||
| 	. "github.com/alecthomas/chroma" // nolint | ||||
| 	"github.com/alecthomas/chroma/lexers/internal" | ||||
| ) | ||||
|  | ||||
| // Zig lexer. | ||||
| var Zig = internal.Register(MustNewLexer( | ||||
| 	&Config{ | ||||
| 		Name:      "Zig", | ||||
| 		Aliases:   []string{"zig"}, | ||||
| 		Filenames: []string{"*.zig"}, | ||||
| 		MimeTypes: []string{"text/zig"}, | ||||
| 	}, | ||||
| 	Rules{ | ||||
| 		"root": { | ||||
| 			{`\n`, TextWhitespace, nil}, | ||||
| 			{`\s+`, TextWhitespace, nil}, | ||||
| 			{`//.*?\n`, CommentSingle, nil}, | ||||
| 			{Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil}, | ||||
| 			{Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil}, | ||||
| 			{Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil}, | ||||
| 			{Words(``, `\b`, `while`, `for`), Keyword, nil}, | ||||
| 			{Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil}, | ||||
| 			{Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil}, | ||||
| 			{Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil}, | ||||
| 			{Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil}, | ||||
| 			{`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil}, | ||||
| 			{`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil}, | ||||
| 			{`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil}, | ||||
| 			{`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil}, | ||||
| 			{`0b[01]+`, LiteralNumberBin, nil}, | ||||
| 			{`0o[0-7]+`, LiteralNumberOct, nil}, | ||||
| 			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, | ||||
| 			{`[0-9]+`, LiteralNumberInteger, nil}, | ||||
| 			{`@[a-zA-Z_]\w*`, NameBuiltin, nil}, | ||||
| 			{`[a-zA-Z_]\w*`, Name, nil}, | ||||
| 			{`\'\\\'\'`, LiteralStringEscape, nil}, | ||||
| 			{`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil}, | ||||
| 			{`\'[^\\\']\'`, LiteralString, nil}, | ||||
| 			{`\\\\[^\n]*`, LiteralStringHeredoc, nil}, | ||||
| 			{`c\\\\[^\n]*`, LiteralStringHeredoc, nil}, | ||||
| 			{`c?"`, LiteralString, Push("string")}, | ||||
| 			{`[+%=><|^!?/\-*&~:]`, Operator, nil}, | ||||
| 			{`[{}()\[\],.;]`, Punctuation, nil}, | ||||
| 		}, | ||||
| 		"string": { | ||||
| 			{`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil}, | ||||
| 			{`[^\\"\n]+`, LiteralString, nil}, | ||||
| 			{`"`, LiteralString, Pop(1)}, | ||||
| 		}, | ||||
| 	}, | ||||
| )) | ||||
							
								
								
									
vendor/github.com/alecthomas/chroma/regexp.go (19 changed lines; generated, vendored)
							| @@ -6,6 +6,7 @@ import ( | ||||
| 	"regexp" | ||||
| 	"strings" | ||||
| 	"sync" | ||||
| 	"time" | ||||
| 	"unicode/utf8" | ||||
|  | ||||
| 	"github.com/dlclark/regexp2" | ||||
| @@ -160,6 +161,14 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro | ||||
| // Rules maps from state to a sequence of Rules. | ||||
| type Rules map[string][]Rule | ||||
|  | ||||
| // Rename clones rules then a rule. | ||||
| func (r Rules) Rename(old, new string) Rules { | ||||
| 	r = r.Clone() | ||||
| 	r[new] = r[old] | ||||
| 	delete(r, old) | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| // Clone returns a clone of the Rules. | ||||
| func (r Rules) Clone() Rules { | ||||
| 	out := map[string][]Rule{} | ||||
| @@ -170,6 +179,15 @@ func (r Rules) Clone() Rules { | ||||
| 	return out | ||||
| } | ||||
|  | ||||
| // Merge creates a clone of "r" then merges "rules" into the clone. | ||||
| func (r Rules) Merge(rules Rules) Rules { | ||||
| 	out := r.Clone() | ||||
| 	for k, v := range rules.Clone() { | ||||
| 		out[k] = v | ||||
| 	} | ||||
| 	return out | ||||
| } | ||||
|  | ||||
| // MustNewLexer creates a new Lexer or panics. | ||||
| func MustNewLexer(config *Config, rules Rules) *RegexLexer { | ||||
| 	lexer, err := NewLexer(config, rules) | ||||
| @@ -376,6 +394,7 @@ func (r *RegexLexer) maybeCompile() (err error) { | ||||
| 				if err != nil { | ||||
| 					return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err) | ||||
| 				} | ||||
| 				rule.Regexp.MatchTimeout = time.Millisecond * 250 | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|   | ||||
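The hunk above adds two helpers to chroma's `Rules` type, `Rename` and `Merge`, plus a 250 ms match timeout on each compiled rule. The snippet below is an illustrative sketch only, not part of this commit: it shows how those helpers can compose when deriving one rule set from another. The state names and the single placeholder pattern are hypothetical.

```go
package main

import "github.com/alecthomas/chroma"

func main() {
	// Hypothetical starting rule set; the pattern is a placeholder.
	base := chroma.Rules{
		"root": {
			{`\s+`, chroma.Whitespace, nil},
		},
	}

	// Rename works on a clone and moves state "root" to "base";
	// Merge overlays the given states onto another clone, leaving base untouched.
	derived := base.Rename("root", "base").Merge(chroma.Rules{
		"root": {
			chroma.Include("base"),
		},
	})
	_ = derived
}
```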
							
								
								
									
vendor/github.com/aymerick/douceur/LICENSE (22 added lines; generated, vendored, new file)
							| @@ -0,0 +1,22 @@ | ||||
| The MIT License (MIT) | ||||
|  | ||||
| Copyright (c) 2015 Aymerick JEHANNE | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| of this software and associated documentation files (the "Software"), to deal | ||||
| in the Software without restriction, including without limitation the rights | ||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| copies of the Software, and to permit persons to whom the Software is | ||||
| furnished to do so, subject to the following conditions: | ||||
|  | ||||
| The above copyright notice and this permission notice shall be included in all | ||||
| copies or substantial portions of the Software. | ||||
|  | ||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||||
| SOFTWARE. | ||||
|  | ||||
							
								
								
									
vendor/github.com/aymerick/douceur/css/declaration.go (60 added lines; generated, vendored, new file)
							| @@ -0,0 +1,60 @@ | ||||
| package css | ||||
|  | ||||
| import "fmt" | ||||
|  | ||||
| // Declaration represents a parsed style property | ||||
| type Declaration struct { | ||||
| 	Property  string | ||||
| 	Value     string | ||||
| 	Important bool | ||||
| } | ||||
|  | ||||
| // NewDeclaration instanciates a new Declaration | ||||
| func NewDeclaration() *Declaration { | ||||
| 	return &Declaration{} | ||||
| } | ||||
|  | ||||
| // Returns string representation of the Declaration | ||||
| func (decl *Declaration) String() string { | ||||
| 	return decl.StringWithImportant(true) | ||||
| } | ||||
|  | ||||
| // StringWithImportant returns string representation with optional !important part | ||||
| func (decl *Declaration) StringWithImportant(option bool) string { | ||||
| 	result := fmt.Sprintf("%s: %s", decl.Property, decl.Value) | ||||
|  | ||||
| 	if option && decl.Important { | ||||
| 		result += " !important" | ||||
| 	} | ||||
|  | ||||
| 	result += ";" | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Equal returns true if both Declarations are equals | ||||
| func (decl *Declaration) Equal(other *Declaration) bool { | ||||
| 	return (decl.Property == other.Property) && (decl.Value == other.Value) && (decl.Important == other.Important) | ||||
| } | ||||
|  | ||||
| // | ||||
| // DeclarationsByProperty | ||||
| // | ||||
|  | ||||
| // DeclarationsByProperty represents sortable style declarations | ||||
| type DeclarationsByProperty []*Declaration | ||||
|  | ||||
| // Implements sort.Interface | ||||
| func (declarations DeclarationsByProperty) Len() int { | ||||
| 	return len(declarations) | ||||
| } | ||||
|  | ||||
| // Implements sort.Interface | ||||
| func (declarations DeclarationsByProperty) Swap(i, j int) { | ||||
| 	declarations[i], declarations[j] = declarations[j], declarations[i] | ||||
| } | ||||
|  | ||||
| // Implements sort.Interface | ||||
| func (declarations DeclarationsByProperty) Less(i, j int) bool { | ||||
| 	return declarations[i].Property < declarations[j].Property | ||||
| } | ||||
							
								
								
									
vendor/github.com/aymerick/douceur/css/rule.go (230 added lines; generated, vendored, new file)
							| @@ -0,0 +1,230 @@ | ||||
| package css | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	indentSpace = 2 | ||||
| ) | ||||
|  | ||||
| // RuleKind represents a Rule kind | ||||
| type RuleKind int | ||||
|  | ||||
| // Rule kinds | ||||
| const ( | ||||
| 	QualifiedRule RuleKind = iota | ||||
| 	AtRule | ||||
| ) | ||||
|  | ||||
| // At Rules than have Rules inside their block instead of Declarations | ||||
| var atRulesWithRulesBlock = []string{ | ||||
| 	"@document", "@font-feature-values", "@keyframes", "@media", "@supports", | ||||
| } | ||||
|  | ||||
| // Rule represents a parsed CSS rule | ||||
| type Rule struct { | ||||
| 	Kind RuleKind | ||||
|  | ||||
| 	// At Rule name (eg: "@media") | ||||
| 	Name string | ||||
|  | ||||
| 	// Raw prelude | ||||
| 	Prelude string | ||||
|  | ||||
| 	// Qualified Rule selectors parsed from prelude | ||||
| 	Selectors []string | ||||
|  | ||||
| 	// Style properties | ||||
| 	Declarations []*Declaration | ||||
|  | ||||
| 	// At Rule embedded rules | ||||
| 	Rules []*Rule | ||||
|  | ||||
| 	// Current rule embedding level | ||||
| 	EmbedLevel int | ||||
| } | ||||
|  | ||||
| // NewRule instanciates a new Rule | ||||
| func NewRule(kind RuleKind) *Rule { | ||||
| 	return &Rule{ | ||||
| 		Kind: kind, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Returns string representation of rule kind | ||||
| func (kind RuleKind) String() string { | ||||
| 	switch kind { | ||||
| 	case QualifiedRule: | ||||
| 		return "Qualified Rule" | ||||
| 	case AtRule: | ||||
| 		return "At Rule" | ||||
| 	default: | ||||
| 		return "WAT" | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // EmbedsRules returns true if this rule embeds another rules | ||||
| func (rule *Rule) EmbedsRules() bool { | ||||
| 	if rule.Kind == AtRule { | ||||
| 		for _, atRuleName := range atRulesWithRulesBlock { | ||||
| 			if rule.Name == atRuleName { | ||||
| 				return true | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // Equal returns true if both rules are equals | ||||
| func (rule *Rule) Equal(other *Rule) bool { | ||||
| 	if (rule.Kind != other.Kind) || | ||||
| 		(rule.Prelude != other.Prelude) || | ||||
| 		(rule.Name != other.Name) { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	if (len(rule.Selectors) != len(other.Selectors)) || | ||||
| 		(len(rule.Declarations) != len(other.Declarations)) || | ||||
| 		(len(rule.Rules) != len(other.Rules)) { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	for i, sel := range rule.Selectors { | ||||
| 		if sel != other.Selectors[i] { | ||||
| 			return false | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	for i, decl := range rule.Declarations { | ||||
| 		if !decl.Equal(other.Declarations[i]) { | ||||
| 			return false | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	for i, rule := range rule.Rules { | ||||
| 		if !rule.Equal(other.Rules[i]) { | ||||
| 			return false | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return true | ||||
| } | ||||
|  | ||||
| // Diff returns a string representation of rules differences | ||||
| func (rule *Rule) Diff(other *Rule) []string { | ||||
| 	result := []string{} | ||||
|  | ||||
| 	if rule.Kind != other.Kind { | ||||
| 		result = append(result, fmt.Sprintf("Kind: %s | %s", rule.Kind.String(), other.Kind.String())) | ||||
| 	} | ||||
|  | ||||
| 	if rule.Prelude != other.Prelude { | ||||
| 		result = append(result, fmt.Sprintf("Prelude: \"%s\" | \"%s\"", rule.Prelude, other.Prelude)) | ||||
| 	} | ||||
|  | ||||
| 	if rule.Name != other.Name { | ||||
| 		result = append(result, fmt.Sprintf("Name: \"%s\" | \"%s\"", rule.Name, other.Name)) | ||||
| 	} | ||||
|  | ||||
| 	if len(rule.Selectors) != len(other.Selectors) { | ||||
| 		result = append(result, fmt.Sprintf("Selectors: %v | %v", strings.Join(rule.Selectors, ", "), strings.Join(other.Selectors, ", "))) | ||||
| 	} else { | ||||
| 		for i, sel := range rule.Selectors { | ||||
| 			if sel != other.Selectors[i] { | ||||
| 				result = append(result, fmt.Sprintf("Selector: \"%s\" | \"%s\"", sel, other.Selectors[i])) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if len(rule.Declarations) != len(other.Declarations) { | ||||
| 		result = append(result, fmt.Sprintf("Declarations Nb: %d | %d", len(rule.Declarations), len(other.Declarations))) | ||||
| 	} else { | ||||
| 		for i, decl := range rule.Declarations { | ||||
| 			if !decl.Equal(other.Declarations[i]) { | ||||
| 				result = append(result, fmt.Sprintf("Declaration: \"%s\" | \"%s\"", decl.String(), other.Declarations[i].String())) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if len(rule.Rules) != len(other.Rules) { | ||||
| 		result = append(result, fmt.Sprintf("Rules Nb: %d | %d", len(rule.Rules), len(other.Rules))) | ||||
| 	} else { | ||||
|  | ||||
| 		for i, rule := range rule.Rules { | ||||
| 			if !rule.Equal(other.Rules[i]) { | ||||
| 				result = append(result, fmt.Sprintf("Rule: \"%s\" | \"%s\"", rule.String(), other.Rules[i].String())) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Returns the string representation of a rule | ||||
| func (rule *Rule) String() string { | ||||
| 	result := "" | ||||
|  | ||||
| 	if rule.Kind == QualifiedRule { | ||||
| 		for i, sel := range rule.Selectors { | ||||
| 			if i != 0 { | ||||
| 				result += ", " | ||||
| 			} | ||||
| 			result += sel | ||||
| 		} | ||||
| 	} else { | ||||
| 		// AtRule | ||||
| 		result += fmt.Sprintf("%s", rule.Name) | ||||
|  | ||||
| 		if rule.Prelude != "" { | ||||
| 			if result != "" { | ||||
| 				result += " " | ||||
| 			} | ||||
| 			result += fmt.Sprintf("%s", rule.Prelude) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if (len(rule.Declarations) == 0) && (len(rule.Rules) == 0) { | ||||
| 		result += ";" | ||||
| 	} else { | ||||
| 		result += " {\n" | ||||
|  | ||||
| 		if rule.EmbedsRules() { | ||||
| 			for _, subRule := range rule.Rules { | ||||
| 				result += fmt.Sprintf("%s%s\n", rule.indent(), subRule.String()) | ||||
| 			} | ||||
| 		} else { | ||||
| 			for _, decl := range rule.Declarations { | ||||
| 				result += fmt.Sprintf("%s%s\n", rule.indent(), decl.String()) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		result += fmt.Sprintf("%s}", rule.indentEndBlock()) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Returns identation spaces for declarations and rules | ||||
| func (rule *Rule) indent() string { | ||||
| 	result := "" | ||||
|  | ||||
| 	for i := 0; i < ((rule.EmbedLevel + 1) * indentSpace); i++ { | ||||
| 		result += " " | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Returns identation spaces for end of block character | ||||
| func (rule *Rule) indentEndBlock() string { | ||||
| 	result := "" | ||||
|  | ||||
| 	for i := 0; i < (rule.EmbedLevel * indentSpace); i++ { | ||||
| 		result += " " | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
							
								
								
									
vendor/github.com/aymerick/douceur/css/stylesheet.go (25 added lines; generated, vendored, new file)
							| @@ -0,0 +1,25 @@ | ||||
| package css | ||||
|  | ||||
| // Stylesheet represents a parsed stylesheet | ||||
| type Stylesheet struct { | ||||
| 	Rules []*Rule | ||||
| } | ||||
|  | ||||
| // NewStylesheet instanciate a new Stylesheet | ||||
| func NewStylesheet() *Stylesheet { | ||||
| 	return &Stylesheet{} | ||||
| } | ||||
|  | ||||
| // Returns string representation of the Stylesheet | ||||
| func (sheet *Stylesheet) String() string { | ||||
| 	result := "" | ||||
|  | ||||
| 	for _, rule := range sheet.Rules { | ||||
| 		if result != "" { | ||||
| 			result += "\n" | ||||
| 		} | ||||
| 		result += rule.String() | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
							
								
								
									
vendor/github.com/charmbracelet/glamour/.golangci.yml (26 added lines; generated, vendored, new file)
							| @@ -0,0 +1,26 @@ | ||||
| run: | ||||
|   tests: false | ||||
|  | ||||
| issues: | ||||
|   max-issues-per-linter: 0 | ||||
|   max-same-issues: 0 | ||||
|  | ||||
| linters: | ||||
|   enable: | ||||
|     - bodyclose | ||||
|     - dupl | ||||
|     - exportloopref | ||||
|     - goconst | ||||
|     - godot | ||||
|     - godox | ||||
|     - goimports | ||||
|     - gomnd | ||||
|     - goprintffuncname | ||||
|     - gosec | ||||
|     - misspell | ||||
|     - prealloc | ||||
|     - rowserrcheck | ||||
|     - sqlclosecheck | ||||
|     - unconvert | ||||
|     - unparam | ||||
|     - whitespace | ||||
							
								
								
									
vendor/github.com/charmbracelet/glamour/README.md (31 changed lines; generated, vendored)
							| @@ -1,10 +1,13 @@ | ||||
| # Glamour | ||||
|  | ||||
| [](https://github.com/charmbracelet/glamour/releases) | ||||
| [](https://pkg.go.dev/github.com/charmbracelet/glamour?tab=doc) | ||||
| [](https://github.com/charmbracelet/glamour/actions) | ||||
| [](https://coveralls.io/github/charmbracelet/glamour?branch=master) | ||||
| [](http://goreportcard.com/report/charmbracelet/glamour) | ||||
| <p> | ||||
|     <img src="https://stuff.charm.sh/glamour/glamour-github-header.png" width="245" alt="Glamour Title Treatment"><br> | ||||
|     <a href="https://github.com/charmbracelet/glamour/releases"><img src="https://img.shields.io/github/release/charmbracelet/glamour.svg" alt="Latest Release"></a> | ||||
|     <a href="https://pkg.go.dev/github.com/charmbracelet/glamour?tab=doc"><img src="https://godoc.org/github.com/golang/gddo?status.svg" alt="GoDoc"></a> | ||||
|     <a href="https://github.com/charmbracelet/glamour/actions"><img src="https://github.com/charmbracelet/glamour/workflows/build/badge.svg" alt="Build Status"></a> | ||||
|     <a href="https://coveralls.io/github/charmbracelet/glamour?branch=master"><img src="https://coveralls.io/repos/github/charmbracelet/glamour/badge.svg?branch=master" alt="Coverage Status"></a> | ||||
|     <a href="http://goreportcard.com/report/charmbracelet/glamour"><img src="http://goreportcard.com/badge/charmbracelet/glamour" alt="Go ReportCard"></a> | ||||
| </p> | ||||
|  | ||||
| Write handsome command-line tools with *glamour*! | ||||
|  | ||||
| @@ -63,10 +66,22 @@ There are a few options for using a custom style: | ||||
|  | ||||
| ## Glamourous Projects | ||||
|  | ||||
| Check out [Glow](https://github.com/charmbracelet/glow), a markdown renderer for | ||||
| the command-line, which uses `glamour`. | ||||
|  | ||||
| Check out these projects, which use `glamour`: | ||||
| - [Glow](https://github.com/charmbracelet/glow), a markdown renderer for | ||||
| the command-line. | ||||
| - [GitHub CLI](https://github.com/cli/cli), GitHub’s official command line tool. | ||||
| - [GLab](https://github.com/profclems/glab), An open source GitLab command line tool. | ||||
|  | ||||
| ## License | ||||
|  | ||||
| [MIT](https://github.com/charmbracelet/glamour/raw/master/LICENSE) | ||||
|  | ||||
|  | ||||
| *** | ||||
|  | ||||
| Part of [Charm](https://charm.sh). | ||||
|  | ||||
| <a href="https://charm.sh/"><img alt="the Charm logo" src="https://stuff.charm.sh/charm-badge.jpg" width="400"></a> | ||||
|  | ||||
| Charm热爱开源! / Charm loves open source! | ||||
|  | ||||
|   | ||||
							
								
								
									
vendor/github.com/charmbracelet/glamour/ansi/elements.go (9 changed lines; generated, vendored)
							| @@ -7,6 +7,7 @@ import ( | ||||
| 	"io" | ||||
| 	"strings" | ||||
|  | ||||
| 	east "github.com/yuin/goldmark-emoji/ast" | ||||
| 	"github.com/yuin/goldmark/ast" | ||||
| 	astext "github.com/yuin/goldmark/extension/ast" | ||||
| ) | ||||
| @@ -375,6 +376,14 @@ func (tr *ANSIRenderer) NewElement(node ast.Node, source []byte) Element { | ||||
| 	case ast.KindTextBlock: | ||||
| 		return Element{} | ||||
|  | ||||
| 	case east.KindEmoji: | ||||
| 		n := node.(*east.Emoji) | ||||
| 		return Element{ | ||||
| 			Renderer: &BaseElement{ | ||||
| 				Token: string(n.Value.Unicode), | ||||
| 			}, | ||||
| 		} | ||||
|  | ||||
| 	// Unknown case | ||||
| 	default: | ||||
| 		fmt.Println("Warning: unhandled element", node.Kind().String()) | ||||
|   | ||||
							
								
								
									
vendor/github.com/charmbracelet/glamour/ansi/image.go (2 changed lines; generated, vendored)
							| @@ -25,7 +25,7 @@ func (e *ImageElement) Render(w io.Writer, ctx RenderContext) error { | ||||
| 	} | ||||
| 	if len(e.URL) > 0 { | ||||
| 		el := &BaseElement{ | ||||
| 			Token:  resolveRelativeURL(e.BaseURL, e.URL), | ||||
| 			Token:  resolveURL(e.BaseURL, e.URL), | ||||
| 			Prefix: " ", | ||||
| 			Style:  ctx.options.Styles.Image, | ||||
| 		} | ||||
|   | ||||
							
								
								
									
vendor/github.com/charmbracelet/glamour/ansi/link.go (2 changed lines; generated, vendored)
							| @@ -64,7 +64,7 @@ func (e *LinkElement) Render(w io.Writer, ctx RenderContext) error { | ||||
| 		} | ||||
|  | ||||
| 		el := &BaseElement{ | ||||
| 			Token:  resolveRelativeURL(e.BaseURL, e.URL), | ||||
| 			Token:  resolveURL(e.BaseURL, e.URL), | ||||
| 			Prefix: pre, | ||||
| 			Style:  style, | ||||
| 		} | ||||
|   | ||||
							
								
								
									
vendor/github.com/charmbracelet/glamour/ansi/renderer.go (10 changed lines; generated, vendored)
							| @@ -3,9 +3,9 @@ package ansi | ||||
| import ( | ||||
| 	"io" | ||||
| 	"net/url" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/muesli/termenv" | ||||
| 	east "github.com/yuin/goldmark-emoji/ast" | ||||
| 	"github.com/yuin/goldmark/ast" | ||||
| 	astext "github.com/yuin/goldmark/extension/ast" | ||||
| 	"github.com/yuin/goldmark/renderer" | ||||
| @@ -72,13 +72,16 @@ func (r *ANSIRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) { | ||||
| 	reg.Register(astext.KindFootnote, r.renderNode) | ||||
| 	reg.Register(astext.KindFootnoteList, r.renderNode) | ||||
| 	reg.Register(astext.KindFootnoteLink, r.renderNode) | ||||
| 	reg.Register(astext.KindFootnoteBackLink, r.renderNode) | ||||
| 	reg.Register(astext.KindFootnoteBacklink, r.renderNode) | ||||
|  | ||||
| 	// checkboxes | ||||
| 	reg.Register(astext.KindTaskCheckBox, r.renderNode) | ||||
|  | ||||
| 	// strikethrough | ||||
| 	reg.Register(astext.KindStrikethrough, r.renderNode) | ||||
|  | ||||
| 	// emoji | ||||
| 	reg.Register(east.KindEmoji, r.renderNode) | ||||
| } | ||||
|  | ||||
| func (r *ANSIRenderer) renderNode(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { | ||||
| @@ -145,7 +148,7 @@ func isChild(node ast.Node) bool { | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| func resolveRelativeURL(baseURL string, rel string) string { | ||||
| func resolveURL(baseURL string, rel string) string { | ||||
| 	u, err := url.Parse(rel) | ||||
| 	if err != nil { | ||||
| 		return rel | ||||
| @@ -153,7 +156,6 @@ func resolveRelativeURL(baseURL string, rel string) string { | ||||
| 	if u.IsAbs() { | ||||
| 		return rel | ||||
| 	} | ||||
| 	u.Path = strings.TrimPrefix(u.Path, "/") | ||||
|  | ||||
| 	base, err := url.Parse(baseURL) | ||||
| 	if err != nil { | ||||
|   | ||||
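The hunk above renames `resolveRelativeURL` to `resolveURL` and drops the line that stripped a leading slash from the path, so absolute-path links (like the `/gitea/tea/pulls/332` test case in this PR) are no longer rewritten into path-relative ones. The resolution step itself is outside the visible hunk; the sketch below is only an illustration of the standard-library behaviour (`net/url.ResolveReference`) such a helper can lean on, with a made-up base URL.

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Hypothetical base URL for illustration; not taken from the diff.
	base, _ := url.Parse("https://gitea.com/gitea/tea/pulls/332")

	for _, link := range []string{"./332", "/gitea/tea/pulls/332", "https://example.com/x"} {
		ref, err := url.Parse(link)
		if err != nil || ref.IsAbs() {
			// Fully qualified URLs pass through untouched, as in the hunk above.
			fmt.Println(link)
			continue
		}
		// With the leading slash kept, "/..." resolves against the host,
		// while "./..." resolves against the base path.
		fmt.Println(base.ResolveReference(ref))
	}
}
```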
							
								
								
									
vendor/github.com/charmbracelet/glamour/glamour.go (22 changed lines; generated, vendored)
							| @@ -9,6 +9,7 @@ import ( | ||||
|  | ||||
| 	"github.com/muesli/termenv" | ||||
| 	"github.com/yuin/goldmark" | ||||
| 	emoji "github.com/yuin/goldmark-emoji" | ||||
| 	"github.com/yuin/goldmark/extension" | ||||
| 	"github.com/yuin/goldmark/parser" | ||||
| 	"github.com/yuin/goldmark/renderer" | ||||
| @@ -135,20 +136,17 @@ func WithEnvironmentConfig() TermRendererOption { | ||||
| // standard style. | ||||
| func WithStylePath(stylePath string) TermRendererOption { | ||||
| 	return func(tr *TermRenderer) error { | ||||
| 		jsonBytes, err := ioutil.ReadFile(stylePath) | ||||
| 		switch { | ||||
| 		case err == nil: | ||||
| 			return json.Unmarshal(jsonBytes, &tr.ansiOptions.Styles) | ||||
| 		case os.IsNotExist(err): | ||||
| 		styles, err := getDefaultStyle(stylePath) | ||||
| 		if err != nil { | ||||
| 			jsonBytes, err := ioutil.ReadFile(stylePath) | ||||
| 			if err != nil { | ||||
| 				return err | ||||
| 			} | ||||
|  | ||||
| 			return json.Unmarshal(jsonBytes, &tr.ansiOptions.Styles) | ||||
| 		} | ||||
| 		tr.ansiOptions.Styles = *styles | ||||
| 		return nil | ||||
| 		default: | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @@ -187,6 +185,14 @@ func WithWordWrap(wordWrap int) TermRendererOption { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // WithEmoji sets a TermRenderer's emoji rendering. | ||||
| func WithEmoji() TermRendererOption { | ||||
| 	return func(tr *TermRenderer) error { | ||||
| 		emoji.New().Extend(tr.md) | ||||
| 		return nil | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (tr *TermRenderer) Read(b []byte) (int, error) { | ||||
| 	return tr.renderBuf.Read(b) | ||||
| } | ||||
|   | ||||
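The `WithEmoji` option added above wires goldmark-emoji into the renderer so `:shortcodes:` are replaced with Unicode emoji. Below is a minimal usage sketch, not part of the commit; it assumes the patched glamour module (see the `replace` directive in tea's go.mod) and uses only the options visible in this diff together with glamour's existing `NewTermRenderer` constructor.

```go
package main

import (
	"fmt"
	"log"

	"github.com/charmbracelet/glamour"
)

func main() {
	r, err := glamour.NewTermRenderer(
		glamour.WithWordWrap(80), // existing option, shown in the hunk above
		glamour.WithEmoji(),      // new option introduced by this change
	)
	if err != nil {
		log.Fatal(err)
	}

	out, err := r.Render("Hello :tada: from **glamour**!")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(out)
}
```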
							
								
								
									
vendor/github.com/charmbracelet/glamour/go.mod (11 changed lines; generated, vendored)
							| @@ -3,10 +3,11 @@ module github.com/charmbracelet/glamour | ||||
| go 1.13 | ||||
|  | ||||
| require ( | ||||
| 	github.com/alecthomas/chroma v0.7.3 | ||||
| 	github.com/microcosm-cc/bluemonday v1.0.2 | ||||
| 	github.com/muesli/reflow v0.1.0 | ||||
| 	github.com/muesli/termenv v0.6.0 | ||||
| 	github.com/alecthomas/chroma v0.8.1 | ||||
| 	github.com/microcosm-cc/bluemonday v1.0.4 | ||||
| 	github.com/muesli/reflow v0.2.0 | ||||
| 	github.com/muesli/termenv v0.7.4 | ||||
| 	github.com/olekukonko/tablewriter v0.0.4 | ||||
| 	github.com/yuin/goldmark v1.2.0 | ||||
| 	github.com/yuin/goldmark v1.3.1 | ||||
| 	github.com/yuin/goldmark-emoji v1.0.1 | ||||
| ) | ||||
|   | ||||
							
								
								
									
vendor/github.com/charmbracelet/glamour/go.sum (32 changed lines; generated, vendored)
							| @@ -1,12 +1,16 @@ | ||||
| github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U= | ||||
| github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= | ||||
| github.com/alecthomas/chroma v0.7.3 h1:NfdAERMy+esYQs8OXk0I868/qDxxCEo7FMz1WIqMAeI= | ||||
| github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM= | ||||
| github.com/alecthomas/chroma v0.8.1 h1:ym20sbvyC6RXz45u4qDglcgr8E313oPROshcuCHqiEE= | ||||
| github.com/alecthomas/chroma v0.8.1/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM= | ||||
| github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo= | ||||
| github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0= | ||||
| github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE= | ||||
| github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY= | ||||
| github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ= | ||||
| github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= | ||||
| github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= | ||||
| github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU= | ||||
| github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE= | ||||
| github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ= | ||||
| github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk= | ||||
| github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||
| @@ -14,8 +18,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c | ||||
| github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | ||||
| github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk= | ||||
| github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= | ||||
| github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f h1:5CjVwnuUcp5adK4gmY6i72gpVFVnZDP2h5TmPScB6u4= | ||||
| github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4= | ||||
| github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= | ||||
| github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= | ||||
| github.com/lucasb-eyer/go-colorful v1.0.3 h1:QIbQXiugsb+q10B+MI+7DI1oQLdmnep86tWFlaaUAac= | ||||
| github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= | ||||
| github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= | ||||
| @@ -25,12 +29,12 @@ github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+tw | ||||
| github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= | ||||
| github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= | ||||
| github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= | ||||
| github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s= | ||||
| github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= | ||||
| github.com/muesli/reflow v0.1.0 h1:oQdpLfO56lr5pgLvqD0TcjW85rDjSYSBVdiG1Ch1ddM= | ||||
| github.com/muesli/reflow v0.1.0/go.mod h1:I9bWAt7QTg/que/qmUCJBGlj7wEq8OAFBjPNjc6xK4I= | ||||
| github.com/muesli/termenv v0.6.0 h1:zxvzTBmo4ZcxhNGGWeMz+Tttm51eF5bmPjfy4MCRYlk= | ||||
| github.com/muesli/termenv v0.6.0/go.mod h1:SohX91w6swWA4AYU+QmPx+aSgXhWO0juiyID9UZmbpA= | ||||
| github.com/microcosm-cc/bluemonday v1.0.4 h1:p0L+CTpo/PLFdkoPcJemLXG+fpMD7pYOoDEq1axMbGg= | ||||
| github.com/microcosm-cc/bluemonday v1.0.4/go.mod h1:8iwZnFn2CDDNZ0r6UXhF4xawGvzaqzCRa1n3/lO3W2w= | ||||
| github.com/muesli/reflow v0.2.0 h1:2o0UBJPHHH4fa2GCXU4Rg4DwOtWPMekCeyc5EWbAQp0= | ||||
| github.com/muesli/reflow v0.2.0/go.mod h1:qT22vjVmM9MIUeLgsVYe/Ye7eZlbv9dZjL3dVhUqLX8= | ||||
| github.com/muesli/termenv v0.7.4 h1:/pBqvU5CpkY53tU0vVn+xgs2ZTX63aH5nY+SSps5Xa8= | ||||
| github.com/muesli/termenv v0.7.4/go.mod h1:pZ7qY9l3F7e5xsAOS0zCew2tME+p7bWeBkotCEcIIcc= | ||||
| github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8= | ||||
| github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA= | ||||
| github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= | ||||
| @@ -43,8 +47,12 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ | ||||
| github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= | ||||
| github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= | ||||
| github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | ||||
| github.com/yuin/goldmark v1.2.0 h1:WOOcyaJPlzb8fZ8TloxFe8QZkhOOJx87leDa9MIT9dc= | ||||
| github.com/yuin/goldmark v1.2.0/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM= | ||||
| github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
| github.com/yuin/goldmark v1.3.1 h1:eVwehsLsZlCJCwXyGLgg+Q4iFWE/eTIMG0e8waCmm/I= | ||||
| github.com/yuin/goldmark v1.3.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= | ||||
| github.com/yuin/goldmark-emoji v1.0.1 h1:ctuWEyzGBwiucEqxzwe0SOYDXPAucOrE9NQC18Wa1os= | ||||
| github.com/yuin/goldmark-emoji v1.0.1/go.mod h1:2w1E6FEWLcDQkoTE+7HU6QF1F6SLlNGjRIBbIZQFqkQ= | ||||
| golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis= | ||||
| golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | ||||
| golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg= | ||||
|   | ||||
							
								
								
									
vendor/github.com/chris-ramon/douceur/LICENSE (22 added lines; generated, vendored, new file)
							| @@ -0,0 +1,22 @@ | ||||
| The MIT License (MIT) | ||||
|  | ||||
| Copyright (c) 2015 Aymerick JEHANNE | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| of this software and associated documentation files (the "Software"), to deal | ||||
| in the Software without restriction, including without limitation the rights | ||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| copies of the Software, and to permit persons to whom the Software is | ||||
| furnished to do so, subject to the following conditions: | ||||
|  | ||||
| The above copyright notice and this permission notice shall be included in all | ||||
| copies or substantial portions of the Software. | ||||
|  | ||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||||
| SOFTWARE. | ||||
|  | ||||
							
								
								
									
vendor/github.com/chris-ramon/douceur/parser/parser.go (409 added lines; generated, vendored, new file)
							| @@ -0,0 +1,409 @@ | ||||
| package parser | ||||
|  | ||||
| import ( | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"regexp" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/gorilla/css/scanner" | ||||
|  | ||||
| 	"github.com/aymerick/douceur/css" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	importantSuffixRegexp = `(?i)\s*!important\s*$` | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	importantRegexp *regexp.Regexp | ||||
| ) | ||||
|  | ||||
| // Parser represents a CSS parser | ||||
| type Parser struct { | ||||
| 	scan *scanner.Scanner // Tokenizer | ||||
|  | ||||
| 	// Tokens parsed but not consumed yet | ||||
| 	tokens []*scanner.Token | ||||
|  | ||||
| 	// Rule embedding level | ||||
| 	embedLevel int | ||||
| } | ||||
|  | ||||
| func init() { | ||||
| 	importantRegexp = regexp.MustCompile(importantSuffixRegexp) | ||||
| } | ||||
|  | ||||
| // NewParser instanciates a new parser | ||||
| func NewParser(txt string) *Parser { | ||||
| 	return &Parser{ | ||||
| 		scan: scanner.New(txt), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Parse parses a whole stylesheet | ||||
| func Parse(text string) (*css.Stylesheet, error) { | ||||
| 	result, err := NewParser(text).ParseStylesheet() | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return result, nil | ||||
| } | ||||
|  | ||||
| // ParseDeclarations parses CSS declarations | ||||
| func ParseDeclarations(text string) ([]*css.Declaration, error) { | ||||
| 	result, err := NewParser(text).ParseDeclarations() | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return result, nil | ||||
| } | ||||
|  | ||||
| // ParseStylesheet parses a stylesheet | ||||
| func (parser *Parser) ParseStylesheet() (*css.Stylesheet, error) { | ||||
| 	result := css.NewStylesheet() | ||||
|  | ||||
| 	// Parse BOM | ||||
| 	if _, err := parser.parseBOM(); err != nil { | ||||
| 		return result, err | ||||
| 	} | ||||
|  | ||||
| 	// Parse list of rules | ||||
| 	rules, err := parser.ParseRules() | ||||
| 	if err != nil { | ||||
| 		return result, err | ||||
| 	} | ||||
|  | ||||
| 	result.Rules = rules | ||||
|  | ||||
| 	return result, nil | ||||
| } | ||||
|  | ||||
| // ParseRules parses a list of rules | ||||
| func (parser *Parser) ParseRules() ([]*css.Rule, error) { | ||||
| 	result := []*css.Rule{} | ||||
|  | ||||
| 	inBlock := false | ||||
| 	if parser.tokenChar("{") { | ||||
| 		// parsing a block of rules | ||||
| 		inBlock = true | ||||
| 		parser.embedLevel++ | ||||
|  | ||||
| 		parser.shiftToken() | ||||
| 	} | ||||
|  | ||||
| 	for parser.tokenParsable() { | ||||
| 		if parser.tokenIgnorable() { | ||||
| 			parser.shiftToken() | ||||
| 		} else if parser.tokenChar("}") { | ||||
| 			if !inBlock { | ||||
| 				errMsg := fmt.Sprintf("Unexpected } character: %s", parser.nextToken().String()) | ||||
| 				return result, errors.New(errMsg) | ||||
| 			} | ||||
|  | ||||
| 			parser.shiftToken() | ||||
| 			parser.embedLevel-- | ||||
|  | ||||
| 			// finished | ||||
| 			break | ||||
| 		} else { | ||||
| 			rule, err := parser.ParseRule() | ||||
| 			if err != nil { | ||||
| 				return result, err | ||||
| 			} | ||||
|  | ||||
| 			rule.EmbedLevel = parser.embedLevel | ||||
| 			result = append(result, rule) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result, parser.err() | ||||
| } | ||||
|  | ||||
| // ParseRule parses a rule | ||||
| func (parser *Parser) ParseRule() (*css.Rule, error) { | ||||
| 	if parser.tokenAtKeyword() { | ||||
| 		return parser.parseAtRule() | ||||
| 	} | ||||
|  | ||||
| 	return parser.parseQualifiedRule() | ||||
| } | ||||
|  | ||||
| // ParseDeclarations parses a list of declarations | ||||
| func (parser *Parser) ParseDeclarations() ([]*css.Declaration, error) { | ||||
| 	result := []*css.Declaration{} | ||||
|  | ||||
| 	if parser.tokenChar("{") { | ||||
| 		parser.shiftToken() | ||||
| 	} | ||||
|  | ||||
| 	for parser.tokenParsable() { | ||||
| 		if parser.tokenIgnorable() { | ||||
| 			parser.shiftToken() | ||||
| 		} else if parser.tokenChar("}") { | ||||
| 			// end of block | ||||
| 			parser.shiftToken() | ||||
| 			break | ||||
| 		} else { | ||||
| 			declaration, err := parser.ParseDeclaration() | ||||
| 			if err != nil { | ||||
| 				return result, err | ||||
| 			} | ||||
|  | ||||
| 			result = append(result, declaration) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result, parser.err() | ||||
| } | ||||
|  | ||||
| // ParseDeclaration parses a declaration | ||||
| func (parser *Parser) ParseDeclaration() (*css.Declaration, error) { | ||||
| 	result := css.NewDeclaration() | ||||
| 	curValue := "" | ||||
|  | ||||
| 	for parser.tokenParsable() { | ||||
| 		if parser.tokenChar(":") { | ||||
| 			result.Property = strings.TrimSpace(curValue) | ||||
| 			curValue = "" | ||||
|  | ||||
| 			parser.shiftToken() | ||||
| 		} else if parser.tokenChar(";") || parser.tokenChar("}") { | ||||
| 			if result.Property == "" { | ||||
| 				errMsg := fmt.Sprintf("Unexpected ; character: %s", parser.nextToken().String()) | ||||
| 				return result, errors.New(errMsg) | ||||
| 			} | ||||
|  | ||||
| 			if importantRegexp.MatchString(curValue) { | ||||
| 				result.Important = true | ||||
| 				curValue = importantRegexp.ReplaceAllString(curValue, "") | ||||
| 			} | ||||
|  | ||||
| 			result.Value = strings.TrimSpace(curValue) | ||||
|  | ||||
| 			if parser.tokenChar(";") { | ||||
| 				parser.shiftToken() | ||||
| 			} | ||||
|  | ||||
| 			// finished | ||||
| 			break | ||||
| 		} else { | ||||
| 			token := parser.shiftToken() | ||||
| 			curValue += token.Value | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	// log.Printf("[parsed] Declaration: %s", result.String()) | ||||
|  | ||||
| 	return result, parser.err() | ||||
| } | ||||
|  | ||||
| // Parse an At Rule | ||||
| func (parser *Parser) parseAtRule() (*css.Rule, error) { | ||||
| 	// parse rule name (eg: "@import") | ||||
| 	token := parser.shiftToken() | ||||
|  | ||||
| 	result := css.NewRule(css.AtRule) | ||||
| 	result.Name = token.Value | ||||
|  | ||||
| 	for parser.tokenParsable() { | ||||
| 		if parser.tokenChar(";") { | ||||
| 			parser.shiftToken() | ||||
|  | ||||
| 			// finished | ||||
| 			break | ||||
| 		} else if parser.tokenChar("{") { | ||||
| 			if result.EmbedsRules() { | ||||
| 				// parse rules block | ||||
| 				rules, err := parser.ParseRules() | ||||
| 				if err != nil { | ||||
| 					return result, err | ||||
| 				} | ||||
|  | ||||
| 				result.Rules = rules | ||||
| 			} else { | ||||
| 				// parse declarations block | ||||
| 				declarations, err := parser.ParseDeclarations() | ||||
| 				if err != nil { | ||||
| 					return result, err | ||||
| 				} | ||||
|  | ||||
| 				result.Declarations = declarations | ||||
| 			} | ||||
|  | ||||
| 			// finished | ||||
| 			break | ||||
| 		} else { | ||||
| 			// parse prelude | ||||
| 			prelude, err := parser.parsePrelude() | ||||
| 			if err != nil { | ||||
| 				return result, err | ||||
| 			} | ||||
|  | ||||
| 			result.Prelude = prelude | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	// log.Printf("[parsed] Rule: %s", result.String()) | ||||
|  | ||||
| 	return result, parser.err() | ||||
| } | ||||
|  | ||||
| // Parse a Qualified Rule | ||||
| func (parser *Parser) parseQualifiedRule() (*css.Rule, error) { | ||||
| 	result := css.NewRule(css.QualifiedRule) | ||||
|  | ||||
| 	for parser.tokenParsable() { | ||||
| 		if parser.tokenChar("{") { | ||||
| 			if result.Prelude == "" { | ||||
| 				errMsg := fmt.Sprintf("Unexpected { character: %s", parser.nextToken().String()) | ||||
| 				return result, errors.New(errMsg) | ||||
| 			} | ||||
|  | ||||
| 			// parse declarations block | ||||
| 			declarations, err := parser.ParseDeclarations() | ||||
| 			if err != nil { | ||||
| 				return result, err | ||||
| 			} | ||||
|  | ||||
| 			result.Declarations = declarations | ||||
|  | ||||
| 			// finished | ||||
| 			break | ||||
| 		} else { | ||||
| 			// parse prelude | ||||
| 			prelude, err := parser.parsePrelude() | ||||
| 			if err != nil { | ||||
| 				return result, err | ||||
| 			} | ||||
|  | ||||
| 			result.Prelude = prelude | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	result.Selectors = strings.Split(result.Prelude, ",") | ||||
| 	for i, sel := range result.Selectors { | ||||
| 		result.Selectors[i] = strings.TrimSpace(sel) | ||||
| 	} | ||||
|  | ||||
| 	// log.Printf("[parsed] Rule: %s", result.String()) | ||||
|  | ||||
| 	return result, parser.err() | ||||
| } | ||||
|  | ||||
| // Parse Rule prelude | ||||
| func (parser *Parser) parsePrelude() (string, error) { | ||||
| 	result := "" | ||||
|  | ||||
| 	for parser.tokenParsable() && !parser.tokenEndOfPrelude() { | ||||
| 		token := parser.shiftToken() | ||||
| 		result += token.Value | ||||
| 	} | ||||
|  | ||||
| 	result = strings.TrimSpace(result) | ||||
|  | ||||
| 	// log.Printf("[parsed] prelude: %s", result) | ||||
|  | ||||
| 	return result, parser.err() | ||||
| } | ||||
|  | ||||
| // Parse BOM | ||||
| func (parser *Parser) parseBOM() (bool, error) { | ||||
| 	if parser.nextToken().Type == scanner.TokenBOM { | ||||
| 		parser.shiftToken() | ||||
| 		return true, nil | ||||
| 	} | ||||
|  | ||||
| 	return false, parser.err() | ||||
| } | ||||
|  | ||||
| // Returns next token without removing it from tokens buffer | ||||
| func (parser *Parser) nextToken() *scanner.Token { | ||||
| 	if len(parser.tokens) == 0 { | ||||
| 		// fetch next token | ||||
| 		nextToken := parser.scan.Next() | ||||
|  | ||||
| 		// log.Printf("[token] %s => %v", nextToken.Type.String(), nextToken.Value) | ||||
|  | ||||
| 		// queue it | ||||
| 		parser.tokens = append(parser.tokens, nextToken) | ||||
| 	} | ||||
|  | ||||
| 	return parser.tokens[0] | ||||
| } | ||||
|  | ||||
| // Returns next token and remove it from the tokens buffer | ||||
| func (parser *Parser) shiftToken() *scanner.Token { | ||||
| 	var result *scanner.Token | ||||
|  | ||||
| 	result, parser.tokens = parser.tokens[0], parser.tokens[1:] | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Returns tokenizer error, or nil if no error | ||||
| func (parser *Parser) err() error { | ||||
| 	if parser.tokenError() { | ||||
| 		token := parser.nextToken() | ||||
| 		return fmt.Errorf("Tokenizer error: %s", token.String()) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // Returns true if next token is Error | ||||
| func (parser *Parser) tokenError() bool { | ||||
| 	return parser.nextToken().Type == scanner.TokenError | ||||
| } | ||||
|  | ||||
| // Returns true if next token is EOF | ||||
| func (parser *Parser) tokenEOF() bool { | ||||
| 	return parser.nextToken().Type == scanner.TokenEOF | ||||
| } | ||||
|  | ||||
| // Returns true if next token is a whitespace | ||||
| func (parser *Parser) tokenWS() bool { | ||||
| 	return parser.nextToken().Type == scanner.TokenS | ||||
| } | ||||
|  | ||||
| // Returns true if next token is a comment | ||||
| func (parser *Parser) tokenComment() bool { | ||||
| 	return parser.nextToken().Type == scanner.TokenComment | ||||
| } | ||||
|  | ||||
| // Returns true if next token is a CDO or a CDC | ||||
| func (parser *Parser) tokenCDOorCDC() bool { | ||||
| 	switch parser.nextToken().Type { | ||||
| 	case scanner.TokenCDO, scanner.TokenCDC: | ||||
| 		return true | ||||
| 	default: | ||||
| 		return false | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Returns true if next token is ignorable | ||||
| func (parser *Parser) tokenIgnorable() bool { | ||||
| 	return parser.tokenWS() || parser.tokenComment() || parser.tokenCDOorCDC() | ||||
| } | ||||
|  | ||||
| // Returns true if next token is parsable | ||||
| func (parser *Parser) tokenParsable() bool { | ||||
| 	return !parser.tokenEOF() && !parser.tokenError() | ||||
| } | ||||
|  | ||||
| // Returns true if next token is an At Rule keyword | ||||
| func (parser *Parser) tokenAtKeyword() bool { | ||||
| 	return parser.nextToken().Type == scanner.TokenAtKeyword | ||||
| } | ||||
|  | ||||
| // Returns true if next token is given character | ||||
| func (parser *Parser) tokenChar(value string) bool { | ||||
| 	token := parser.nextToken() | ||||
| 	return (token.Type == scanner.TokenChar) && (token.Value == value) | ||||
| } | ||||
|  | ||||
| // Returns true if next token marks the end of a prelude | ||||
| func (parser *Parser) tokenEndOfPrelude() bool { | ||||
| 	return parser.tokenChar(";") || parser.tokenChar("{") | ||||
| } | ||||
							
								
								
									
vendor/github.com/gorilla/css/LICENSE (27 added lines; generated, vendored, new file)
							| @@ -0,0 +1,27 @@ | ||||
| Copyright (c) 2013, Gorilla web toolkit | ||||
| All rights reserved. | ||||
|  | ||||
| Redistribution and use in source and binary forms, with or without modification, | ||||
| are permitted provided that the following conditions are met: | ||||
|  | ||||
|   Redistributions of source code must retain the above copyright notice, this | ||||
|   list of conditions and the following disclaimer. | ||||
|  | ||||
|   Redistributions in binary form must reproduce the above copyright notice, this | ||||
|   list of conditions and the following disclaimer in the documentation and/or | ||||
|   other materials provided with the distribution. | ||||
|  | ||||
|   Neither the name of the {organization} nor the names of its | ||||
|   contributors may be used to endorse or promote products derived from | ||||
|   this software without specific prior written permission. | ||||
|  | ||||
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND | ||||
| ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | ||||
| WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | ||||
| DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR | ||||
| ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | ||||
| (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | ||||
| LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | ||||
| ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||
| (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | ||||
| SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||
							
								
								
									
vendor/github.com/gorilla/css/scanner/doc.go (33 added lines; generated, vendored, new file)
							| @@ -0,0 +1,33 @@ | ||||
| // Copyright 2012 The Gorilla Authors. All rights reserved. | ||||
| // Use of this source code is governed by a BSD-style | ||||
| // license that can be found in the LICENSE file. | ||||
|  | ||||
| /* | ||||
| Package gorilla/css/scanner generates tokens for a CSS3 input. | ||||
|  | ||||
| It follows the CSS3 specification located at: | ||||
|  | ||||
| 	http://www.w3.org/TR/css3-syntax/ | ||||
|  | ||||
| To use it, create a new scanner for a given CSS string and call Next() until | ||||
| the token returned has type TokenEOF or TokenError: | ||||
|  | ||||
| 	s := scanner.New(myCSS) | ||||
| 	for { | ||||
| 		token := s.Next() | ||||
| 		if token.Type == scanner.TokenEOF || token.Type == scanner.TokenError { | ||||
| 			break | ||||
| 		} | ||||
| 		// Do something with the token... | ||||
| 	} | ||||
|  | ||||
| Following the CSS3 specification, an error can only occur when the scanner | ||||
| finds an unclosed quote or unclosed comment. In these cases the text becomes | ||||
| "untokenizable". Everything else is tokenizable and it is up to a parser | ||||
| to make sense of the token stream (or ignore nonsensical token sequences). | ||||
|  | ||||
| Note: the scanner doesn't perform lexical analysis or, in other words, it | ||||
| doesn't care about the token context. It is intended to be used by a | ||||
| lexer or parser. | ||||
| */ | ||||
| package scanner | ||||
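
To make the loop sketched in the package comment concrete, here is a minimal, self-contained example of driving the scanner from application code. The style string is an invented sample, not something taken from this commit:

```go
package main

import (
	"fmt"

	"github.com/gorilla/css/scanner"
)

func main() {
	// Tokenize a small inline style; the scanner only splits the input into
	// tokens, it does not validate or interpret the CSS.
	s := scanner.New("color: #fff; margin: 0 auto")
	for {
		token := s.Next()
		if token.Type == scanner.TokenEOF || token.Type == scanner.TokenError {
			break
		}
		fmt.Printf("%-10s %q\n", token.Type, token.Value)
	}
}
```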
356  vendor/github.com/gorilla/css/scanner/scanner.go  (generated, vendored, Normal file)
| @@ -0,0 +1,356 @@ | ||||
| // Copyright 2012 The Gorilla Authors. All rights reserved. | ||||
| // Use of this source code is governed by a BSD-style | ||||
| // license that can be found in the LICENSE file. | ||||
|  | ||||
| package scanner | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"regexp" | ||||
| 	"strings" | ||||
| 	"unicode" | ||||
| 	"unicode/utf8" | ||||
| ) | ||||
|  | ||||
| // tokenType identifies the type of lexical tokens. | ||||
| type tokenType int | ||||
|  | ||||
| // String returns a string representation of the token type. | ||||
| func (t tokenType) String() string { | ||||
| 	return tokenNames[t] | ||||
| } | ||||
|  | ||||
| // Token represents a token and the corresponding string. | ||||
| type Token struct { | ||||
| 	Type   tokenType | ||||
| 	Value  string | ||||
| 	Line   int | ||||
| 	Column int | ||||
| } | ||||
|  | ||||
| // String returns a string representation of the token. | ||||
| func (t *Token) String() string { | ||||
| 	if len(t.Value) > 10 { | ||||
| 		return fmt.Sprintf("%s (line: %d, column: %d): %.10q...", | ||||
| 			t.Type, t.Line, t.Column, t.Value) | ||||
| 	} | ||||
| 	return fmt.Sprintf("%s (line: %d, column: %d): %q", | ||||
| 		t.Type, t.Line, t.Column, t.Value) | ||||
| } | ||||
|  | ||||
| // All tokens ----------------------------------------------------------------- | ||||
|  | ||||
| // The complete list of tokens in CSS3. | ||||
| const ( | ||||
| 	// Scanner flags. | ||||
| 	TokenError tokenType = iota | ||||
| 	TokenEOF | ||||
| 	// From now on, only tokens from the CSS specification. | ||||
| 	TokenIdent | ||||
| 	TokenAtKeyword | ||||
| 	TokenString | ||||
| 	TokenHash | ||||
| 	TokenNumber | ||||
| 	TokenPercentage | ||||
| 	TokenDimension | ||||
| 	TokenURI | ||||
| 	TokenUnicodeRange | ||||
| 	TokenCDO | ||||
| 	TokenCDC | ||||
| 	TokenS | ||||
| 	TokenComment | ||||
| 	TokenFunction | ||||
| 	TokenIncludes | ||||
| 	TokenDashMatch | ||||
| 	TokenPrefixMatch | ||||
| 	TokenSuffixMatch | ||||
| 	TokenSubstringMatch | ||||
| 	TokenChar | ||||
| 	TokenBOM | ||||
| ) | ||||
|  | ||||
| // tokenNames maps tokenType's to their names. Used for conversion to string. | ||||
| var tokenNames = map[tokenType]string{ | ||||
| 	TokenError:          "error", | ||||
| 	TokenEOF:            "EOF", | ||||
| 	TokenIdent:          "IDENT", | ||||
| 	TokenAtKeyword:      "ATKEYWORD", | ||||
| 	TokenString:         "STRING", | ||||
| 	TokenHash:           "HASH", | ||||
| 	TokenNumber:         "NUMBER", | ||||
| 	TokenPercentage:     "PERCENTAGE", | ||||
| 	TokenDimension:      "DIMENSION", | ||||
| 	TokenURI:            "URI", | ||||
| 	TokenUnicodeRange:   "UNICODE-RANGE", | ||||
| 	TokenCDO:            "CDO", | ||||
| 	TokenCDC:            "CDC", | ||||
| 	TokenS:              "S", | ||||
| 	TokenComment:        "COMMENT", | ||||
| 	TokenFunction:       "FUNCTION", | ||||
| 	TokenIncludes:       "INCLUDES", | ||||
| 	TokenDashMatch:      "DASHMATCH", | ||||
| 	TokenPrefixMatch:    "PREFIXMATCH", | ||||
| 	TokenSuffixMatch:    "SUFFIXMATCH", | ||||
| 	TokenSubstringMatch: "SUBSTRINGMATCH", | ||||
| 	TokenChar:           "CHAR", | ||||
| 	TokenBOM:            "BOM", | ||||
| } | ||||
|  | ||||
| // Macros and productions ----------------------------------------------------- | ||||
| // http://www.w3.org/TR/css3-syntax/#tokenization | ||||
|  | ||||
| var macroRegexp = regexp.MustCompile(`\{[a-z]+\}`) | ||||
|  | ||||
| // macros maps macro names to patterns to be expanded. | ||||
| var macros = map[string]string{ | ||||
| 	// must be escaped: `\.+*?()|[]{}^$` | ||||
| 	"ident":      `-?{nmstart}{nmchar}*`, | ||||
| 	"name":       `{nmchar}+`, | ||||
| 	"nmstart":    `[a-zA-Z_]|{nonascii}|{escape}`, | ||||
| 	"nonascii":   "[\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]", | ||||
| 	"unicode":    `\\[0-9a-fA-F]{1,6}{wc}?`, | ||||
| 	"escape":     "{unicode}|\\\\[\u0020-\u007E\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]", | ||||
| 	"nmchar":     `[a-zA-Z0-9_-]|{nonascii}|{escape}`, | ||||
| 	"num":        `[0-9]*\.[0-9]+|[0-9]+`, | ||||
| 	"string":     `"(?:{stringchar}|')*"|'(?:{stringchar}|")*'`, | ||||
| 	"stringchar": `{urlchar}|[ ]|\\{nl}`, | ||||
| 	"nl":         `[\n\r\f]|\r\n`, | ||||
| 	"w":          `{wc}*`, | ||||
| 	"wc":         `[\t\n\f\r ]`, | ||||
|  | ||||
| 	// urlchar should accept [(ascii characters minus those that need escaping)|{nonascii}|{escape}] | ||||
| 	// ASCII characters range = `[\u0020-\u007e]` | ||||
| 	// Skip space \u0020 = `[\u0021-\u007e]` | ||||
| 	// Skip quotation mark \0022 = `[\u0021\u0023-\u007e]` | ||||
| 	// Skip apostrophe \u0027 = `[\u0021\u0023-\u0026\u0028-\u007e]` | ||||
| 	// Skip reverse solidus \u005c = `[\u0021\u0023-\u0026\u0028-\u005b\u005d\u007e]` | ||||
| 	// Finally, the left square bracket (\u005b) and right (\u005d) needs escaping themselves | ||||
| 	"urlchar": "[\u0021\u0023-\u0026\u0028-\\\u005b\\\u005d-\u007E]|{nonascii}|{escape}", | ||||
| } | ||||
|  | ||||
| // productions maps the list of tokens to patterns to be expanded. | ||||
| var productions = map[tokenType]string{ | ||||
| 	// Unused regexps (matched using other methods) are commented out. | ||||
| 	TokenIdent:        `{ident}`, | ||||
| 	TokenAtKeyword:    `@{ident}`, | ||||
| 	TokenString:       `{string}`, | ||||
| 	TokenHash:         `#{name}`, | ||||
| 	TokenNumber:       `{num}`, | ||||
| 	TokenPercentage:   `{num}%`, | ||||
| 	TokenDimension:    `{num}{ident}`, | ||||
| 	TokenURI:          `url\({w}(?:{string}|{urlchar}*?){w}\)`, | ||||
| 	TokenUnicodeRange: `U\+[0-9A-F\?]{1,6}(?:-[0-9A-F]{1,6})?`, | ||||
| 	//TokenCDO:            `<!--`, | ||||
| 	TokenCDC:      `-->`, | ||||
| 	TokenS:        `{wc}+`, | ||||
| 	TokenComment:  `/\*[^\*]*[\*]+(?:[^/][^\*]*[\*]+)*/`, | ||||
| 	TokenFunction: `{ident}\(`, | ||||
| 	//TokenIncludes:       `~=`, | ||||
| 	//TokenDashMatch:      `\|=`, | ||||
| 	//TokenPrefixMatch:    `\^=`, | ||||
| 	//TokenSuffixMatch:    `\$=`, | ||||
| 	//TokenSubstringMatch: `\*=`, | ||||
| 	//TokenChar:           `[^"']`, | ||||
| 	//TokenBOM:            "\uFEFF", | ||||
| } | ||||
|  | ||||
| // matchers maps the list of tokens to compiled regular expressions. | ||||
| // | ||||
| // The map is filled on init() using the macros and productions defined in | ||||
| // the CSS specification. | ||||
| var matchers = map[tokenType]*regexp.Regexp{} | ||||
|  | ||||
| // matchOrder is the order to test regexps when first-char shortcuts | ||||
| // can't be used. | ||||
| var matchOrder = []tokenType{ | ||||
| 	TokenURI, | ||||
| 	TokenFunction, | ||||
| 	TokenUnicodeRange, | ||||
| 	TokenIdent, | ||||
| 	TokenDimension, | ||||
| 	TokenPercentage, | ||||
| 	TokenNumber, | ||||
| 	TokenCDC, | ||||
| } | ||||
|  | ||||
| func init() { | ||||
| 	// replace macros and compile regexps for productions. | ||||
| 	replaceMacro := func(s string) string { | ||||
| 		return "(?:" + macros[s[1:len(s)-1]] + ")" | ||||
| 	} | ||||
| 	for t, s := range productions { | ||||
| 		for macroRegexp.MatchString(s) { | ||||
| 			s = macroRegexp.ReplaceAllStringFunc(s, replaceMacro) | ||||
| 		} | ||||
| 		matchers[t] = regexp.MustCompile("^(?:" + s + ")") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Scanner -------------------------------------------------------------------- | ||||
|  | ||||
| // New returns a new CSS scanner for the given input. | ||||
| func New(input string) *Scanner { | ||||
| 	// Normalize newlines. | ||||
| 	input = strings.Replace(input, "\r\n", "\n", -1) | ||||
| 	return &Scanner{ | ||||
| 		input: input, | ||||
| 		row:   1, | ||||
| 		col:   1, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Scanner scans an input and emits tokens following the CSS3 specification. | ||||
| type Scanner struct { | ||||
| 	input string | ||||
| 	pos   int | ||||
| 	row   int | ||||
| 	col   int | ||||
| 	err   *Token | ||||
| } | ||||
|  | ||||
| // Next returns the next token from the input. | ||||
| // | ||||
| // At the end of the input the token type is TokenEOF. | ||||
| // | ||||
| // If the input can't be tokenized the token type is TokenError. This occurs | ||||
| // in case of unclosed quotation marks or comments. | ||||
| func (s *Scanner) Next() *Token { | ||||
| 	if s.err != nil { | ||||
| 		return s.err | ||||
| 	} | ||||
| 	if s.pos >= len(s.input) { | ||||
| 		s.err = &Token{TokenEOF, "", s.row, s.col} | ||||
| 		return s.err | ||||
| 	} | ||||
| 	if s.pos == 0 { | ||||
| 		// Test BOM only once, at the beginning of the file. | ||||
| 		if strings.HasPrefix(s.input, "\uFEFF") { | ||||
| 			return s.emitSimple(TokenBOM, "\uFEFF") | ||||
| 		} | ||||
| 	} | ||||
| 	// There's a lot we can guess based on the first byte so we'll take a | ||||
| 	// shortcut before testing multiple regexps. | ||||
| 	input := s.input[s.pos:] | ||||
| 	switch input[0] { | ||||
| 	case '\t', '\n', '\f', '\r', ' ': | ||||
| 		// Whitespace. | ||||
| 		return s.emitToken(TokenS, matchers[TokenS].FindString(input)) | ||||
| 	case '.': | ||||
| 		// Dot is too common to not have a quick check. | ||||
| 		// We'll test if this is a Char; if it is followed by a number it is a | ||||
| 		// dimension/percentage/number, and this will be matched later. | ||||
| 		if len(input) > 1 && !unicode.IsDigit(rune(input[1])) { | ||||
| 			return s.emitSimple(TokenChar, ".") | ||||
| 		} | ||||
| 	case '#': | ||||
| 		// Another common one: Hash or Char. | ||||
| 		if match := matchers[TokenHash].FindString(input); match != "" { | ||||
| 			return s.emitToken(TokenHash, match) | ||||
| 		} | ||||
| 		return s.emitSimple(TokenChar, "#") | ||||
| 	case '@': | ||||
| 		// Another common one: AtKeyword or Char. | ||||
| 		if match := matchers[TokenAtKeyword].FindString(input); match != "" { | ||||
| 			return s.emitSimple(TokenAtKeyword, match) | ||||
| 		} | ||||
| 		return s.emitSimple(TokenChar, "@") | ||||
| 	case ':', ',', ';', '%', '&', '+', '=', '>', '(', ')', '[', ']', '{', '}': | ||||
| 		// More common chars. | ||||
| 		return s.emitSimple(TokenChar, string(input[0])) | ||||
| 	case '"', '\'': | ||||
| 		// String or error. | ||||
| 		match := matchers[TokenString].FindString(input) | ||||
| 		if match != "" { | ||||
| 			return s.emitToken(TokenString, match) | ||||
| 		} | ||||
|  | ||||
| 		s.err = &Token{TokenError, "unclosed quotation mark", s.row, s.col} | ||||
| 		return s.err | ||||
| 	case '/': | ||||
| 		// Comment, error or Char. | ||||
| 		if len(input) > 1 && input[1] == '*' { | ||||
| 			match := matchers[TokenComment].FindString(input) | ||||
| 			if match != "" { | ||||
| 				return s.emitToken(TokenComment, match) | ||||
| 			} else { | ||||
| 				s.err = &Token{TokenError, "unclosed comment", s.row, s.col} | ||||
| 				return s.err | ||||
| 			} | ||||
| 		} | ||||
| 		return s.emitSimple(TokenChar, "/") | ||||
| 	case '~': | ||||
| 		// Includes or Char. | ||||
| 		return s.emitPrefixOrChar(TokenIncludes, "~=") | ||||
| 	case '|': | ||||
| 		// DashMatch or Char. | ||||
| 		return s.emitPrefixOrChar(TokenDashMatch, "|=") | ||||
| 	case '^': | ||||
| 		// PrefixMatch or Char. | ||||
| 		return s.emitPrefixOrChar(TokenPrefixMatch, "^=") | ||||
| 	case '$': | ||||
| 		// SuffixMatch or Char. | ||||
| 		return s.emitPrefixOrChar(TokenSuffixMatch, "$=") | ||||
| 	case '*': | ||||
| 		// SubstringMatch or Char. | ||||
| 		return s.emitPrefixOrChar(TokenSubstringMatch, "*=") | ||||
| 	case '<': | ||||
| 		// CDO or Char. | ||||
| 		return s.emitPrefixOrChar(TokenCDO, "<!--") | ||||
| 	} | ||||
| 	// Test all regexps, in order. | ||||
| 	for _, token := range matchOrder { | ||||
| 		if match := matchers[token].FindString(input); match != "" { | ||||
| 			return s.emitToken(token, match) | ||||
| 		} | ||||
| 	} | ||||
| 	// We already handled unclosed quotation marks and comments, | ||||
| 	// so this can only be a Char. | ||||
| 	r, width := utf8.DecodeRuneInString(input) | ||||
| 	token := &Token{TokenChar, string(r), s.row, s.col} | ||||
| 	s.col += width | ||||
| 	s.pos += width | ||||
| 	return token | ||||
| } | ||||
|  | ||||
| // updatePosition updates input coordinates based on the consumed text. | ||||
| func (s *Scanner) updatePosition(text string) { | ||||
| 	width := utf8.RuneCountInString(text) | ||||
| 	lines := strings.Count(text, "\n") | ||||
| 	s.row += lines | ||||
| 	if lines == 0 { | ||||
| 		s.col += width | ||||
| 	} else { | ||||
| 		s.col = utf8.RuneCountInString(text[strings.LastIndex(text, "\n"):]) | ||||
| 	} | ||||
| 	s.pos += len(text) // while col is a rune index, pos is a byte index | ||||
| } | ||||
|  | ||||
| // emitToken returns a Token for the string v and updates the scanner position. | ||||
| func (s *Scanner) emitToken(t tokenType, v string) *Token { | ||||
| 	token := &Token{t, v, s.row, s.col} | ||||
| 	s.updatePosition(v) | ||||
| 	return token | ||||
| } | ||||
|  | ||||
| // emitSimple returns a Token for the string v and updates the scanner | ||||
| // position in a simplified manner. | ||||
| // | ||||
| // The string is known to have only ASCII characters and to not have a newline. | ||||
| func (s *Scanner) emitSimple(t tokenType, v string) *Token { | ||||
| 	token := &Token{t, v, s.row, s.col} | ||||
| 	s.col += len(v) | ||||
| 	s.pos += len(v) | ||||
| 	return token | ||||
| } | ||||
|  | ||||
| // emitPrefixOrChar returns a Token for type t if the current position | ||||
| // matches the given prefix. Otherwise it returns a Char token using the | ||||
| // first character from the prefix. | ||||
| // | ||||
| // The prefix is known to have only ASCII characters and to not have a newline. | ||||
| func (s *Scanner) emitPrefixOrChar(t tokenType, prefix string) *Token { | ||||
| 	if strings.HasPrefix(s.input[s.pos:], prefix) { | ||||
| 		return s.emitSimple(t, prefix) | ||||
| 	} | ||||
| 	return s.emitSimple(TokenChar, string(prefix[0])) | ||||
| } | ||||
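
For readers skimming the macro table above, the following standalone sketch reproduces what the scanner's `init()` does for a single production (`TokenPercentage`): `{name}` placeholders are substituted until none remain, then the result is anchored and compiled. The input string is an invented example:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// A minimal sketch of the macro expansion in the scanner's init(),
	// showing only the "num" macro and the percentage production.
	macros := map[string]string{
		"num": `[0-9]*\.[0-9]+|[0-9]+`,
	}
	macroRegexp := regexp.MustCompile(`\{[a-z]+\}`)

	production := `{num}%` // the TokenPercentage production
	for macroRegexp.MatchString(production) {
		production = macroRegexp.ReplaceAllStringFunc(production, func(s string) string {
			return "(?:" + macros[s[1:len(s)-1]] + ")"
		})
	}
	matcher := regexp.MustCompile("^(?:" + production + ")")
	fmt.Println(matcher.FindString("42% of the width")) // "42%"
}
```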
15  vendor/github.com/microcosm-cc/bluemonday/.gitignore  (generated, vendored, Normal file)
| @@ -0,0 +1,15 @@ | ||||
|  # Binaries for programs and plugins | ||||
| *.exe | ||||
| *.exe~ | ||||
| *.dll | ||||
| *.so | ||||
| *.dylib | ||||
|  | ||||
| # Test binary, built with `go test -c` | ||||
| *.test | ||||
|  | ||||
| # Output of the go coverage tool, specifically when used with LiteIDE | ||||
| *.out | ||||
|  | ||||
| # goland idea folder | ||||
| *.idea | ||||
2  vendor/github.com/microcosm-cc/bluemonday/.travis.yml  (generated, vendored)
| @@ -1,6 +1,5 @@ | ||||
| language: go | ||||
| go: | ||||
|   - 1.1.x | ||||
|   - 1.2.x | ||||
|   - 1.3.x | ||||
|   - 1.4.x | ||||
| @@ -11,6 +10,7 @@ go: | ||||
|   - 1.9.x | ||||
|   - 1.10.x | ||||
|   - 1.11.x | ||||
|   - 1.12.x | ||||
|   - tip | ||||
| matrix: | ||||
|   allow_failures: | ||||
|   | ||||
5  vendor/github.com/microcosm-cc/bluemonday/CREDITS.md  (generated, vendored)
| @@ -1,6 +1,7 @@ | ||||
| 1. Andrew Krasichkov @buglloc https://github.com/buglloc | ||||
| 1. John Graham-Cumming http://jgc.org/ | ||||
| 1. Mohammad Gufran https://github.com/Gufran | ||||
| 1. Steven Gutzwiller https://github.com/StevenGutzwiller | ||||
| 1. Andrew Krasichkov @buglloc https://github.com/buglloc | ||||
| 1. Mike Samuel mikesamuel@gmail.com | ||||
| 1. Dmitri Shuralyov shurcooL@gmail.com | ||||
| 1. https://github.com/opennota | ||||
| 1. https://github.com/Gufran | ||||
69  vendor/github.com/microcosm-cc/bluemonday/README.md  (generated, vendored)
| @@ -58,10 +58,12 @@ We expect to be supplied with well-formatted HTML (closing elements for every ap | ||||
|  | ||||
| ### Supported Go Versions | ||||
|  | ||||
| bluemonday is tested against Go 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, and tip. | ||||
| bluemonday is tested against Go 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 1.10, 1.11, 1.12, and tip. | ||||
|  | ||||
| We do not support Go 1.0 as we depend on `golang.org/x/net/html` which includes a reference to `io.ErrNoProgress` which did not exist in Go 1.0. | ||||
|  | ||||
| We support Go 1.1 but Travis no longer tests against it. | ||||
|  | ||||
| ## Is it production ready? | ||||
|  | ||||
| *Yes* | ||||
| @@ -167,12 +169,26 @@ To add elements to a policy either add just the elements: | ||||
| p.AllowElements("b", "strong") | ||||
| ``` | ||||
|  | ||||
| Or using a regex: | ||||
|  | ||||
| _Note: if an element is added by name as shown above, any matching regex will be ignored_ | ||||
|  | ||||
| It is also recommended to ensure multiple patterns don't overlap as order of execution is not guaranteed and can result in some rules being missed. | ||||
| ```go | ||||
| p.AllowElementsMatching(regex.MustCompile(`^my-element-`)) | ||||
| ``` | ||||
|  | ||||
| Or add elements as a virtue of adding an attribute: | ||||
| ```go | ||||
| // Not the recommended pattern, see the recommendation on using .Matching() below | ||||
| p.AllowAttrs("nowrap").OnElements("td", "th") | ||||
| ``` | ||||
|  | ||||
| Again, this also supports a regex pattern match alternative: | ||||
| ```go | ||||
| p.AllowAttrs("nowrap").OnElementsMatching(regex.MustCompile(`^my-element-`)) | ||||
| ``` | ||||
|  | ||||
| Attributes can either be added to all elements: | ||||
| ```go | ||||
| p.AllowAttrs("dir").Matching(regexp.MustCompile("(?i)rtl|ltr")).Globally() | ||||
| @@ -202,6 +218,49 @@ p := bluemonday.UGCPolicy() | ||||
| p.AllowElements("fieldset", "select", "option") | ||||
| ``` | ||||
|  | ||||
| ### Inline CSS | ||||
|  | ||||
| Although it's possible to handle inline CSS using `AllowAttrs` with a `Matching` rule, writing a single monolithic regular expression to safely process all inline CSS which you wish to allow is not a trivial task.  Instead of attempting to do so, you can whitelist the `style` attribute on whichever element(s) you desire and use style policies to control and sanitize inline styles. | ||||
|  | ||||
| It is suggested that you use `Matching` (with a suitable regular expression), | ||||
| `MatchingEnum`, or `MatchingHandler` to ensure each style matches your needs, | ||||
| but default handlers are supplied for most widely used styles. | ||||
|  | ||||
| Similar to attributes, you can allow specific CSS properties to be set inline: | ||||
| ```go | ||||
| p.AllowAttrs("style").OnElements("span", "p") | ||||
| // Allow the 'color' property with valid RGB(A) hex values only (on any element allowed a 'style' attribute) | ||||
| p.AllowStyles("color").Matching(regexp.MustCompile("(?i)^#([0-9a-f]{3,4}|[0-9a-f]{6}|[0-9a-f]{8})$")).Globally() | ||||
| ``` | ||||
|  | ||||
| Additionally, you can allow a CSS property to be set only to an allowed value: | ||||
| ```go | ||||
| p.AllowAttrs("style").OnElements("span", "p") | ||||
| // Allow the 'text-decoration' property to be set to 'underline', 'line-through' or 'none' | ||||
| // on 'span' elements only | ||||
| p.AllowStyles("text-decoration").MatchingEnum("underline", "line-through", "none").OnElements("span") | ||||
| ``` | ||||
|  | ||||
| Or you can specify elements based on a regex pattern match: | ||||
| ```go | ||||
| p.AllowAttrs("style").OnElementsMatching(regex.MustCompile(`^my-element-`)) | ||||
| // Allow the 'text-decoration' property to be set to 'underline', 'line-through' or 'none' | ||||
| // on 'span' elements only | ||||
| p.AllowStyles("text-decoration").MatchingEnum("underline", "line-through", "none").OnElementsMatching(regex.MustCompile(`^my-element-`)) | ||||
| ``` | ||||
|  | ||||
| If you need more specific checking, you can create a handler that takes in a string and returns a bool to | ||||
| validate the values for a given property. The string parameter has been | ||||
| converted to lowercase and unicode code points have been converted. | ||||
| ```go | ||||
| myHandler := func(value string) bool{ | ||||
| 	return true | ||||
| } | ||||
| p.AllowAttrs("style").OnElements("span", "p") | ||||
| // Allow the 'color' property with values validated by the handler (on any element allowed a 'style' attribute) | ||||
| p.AllowStyles("color").MatchingHandler(myHandler).Globally() | ||||
| ``` | ||||
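
Putting the style pieces together, here is a minimal end-to-end sketch against the vendored bluemonday shown in this commit; the sample HTML and the output noted in the comment are illustrative, not verified results:

```go
package main

import (
	"fmt"
	"regexp"

	"github.com/microcosm-cc/bluemonday"
)

func main() {
	p := bluemonday.NewPolicy()
	p.AllowElements("p", "span")
	p.AllowAttrs("style").OnElements("span", "p")
	// Only hex colours survive; any other property in the style attribute is dropped.
	p.AllowStyles("color").
		Matching(regexp.MustCompile("(?i)^#([0-9a-f]{3,4}|[0-9a-f]{6}|[0-9a-f]{8})$")).
		OnElements("span")

	in := `<span style="color: #ff0000; position: fixed">hi</span>`
	fmt.Println(p.Sanitize(in))
	// Expected, roughly: <span style="color: #ff0000">hi</span>
}
```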
|  | ||||
| ### Links | ||||
|  | ||||
| Links are difficult beasts to sanitise safely and also one of the biggest attack vectors for malicious content. | ||||
| @@ -236,6 +295,13 @@ Regardless of whether you have enabled parseable URLs, you can force all URLs to | ||||
| p.RequireNoFollowOnLinks(true) | ||||
| ``` | ||||
|  | ||||
| Similarly, you can force all URLs to have "noreferrer" in their rel attribute. | ||||
| ```go | ||||
| // This applies to "a" "area" "link" elements that have a "href" attribute | ||||
| p.RequireNoReferrerOnLinks(true) | ||||
| ``` | ||||
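
As a rough, unverified sketch of how this combines with the nofollow requirement already enabled by `UGCPolicy()`:

```go
package main

import (
	"fmt"

	"github.com/microcosm-cc/bluemonday"
)

func main() {
	p := bluemonday.UGCPolicy()
	p.RequireNoReferrerOnLinks(true)

	// UGCPolicy already requires rel="nofollow", so with this vendored
	// version the anchor should come back with both values, roughly:
	//   <a href="https://example.com" rel="nofollow noreferrer">x</a>
	fmt.Println(p.Sanitize(`<a href="https://example.com">x</a>`))
}
```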
|  | ||||
|  | ||||
| We provide a convenience method that applies all of the above, but you will still need to whitelist the linkable elements for the URL rules to be applied to: | ||||
| ```go | ||||
| p.AllowStandardURLs() | ||||
| @@ -316,7 +382,6 @@ It is not the job of bluemonday to fix your bad HTML, it is merely the job of bl | ||||
|  | ||||
| ## TODO | ||||
|  | ||||
| * Add support for CSS sanitisation to allow some CSS properties based on a whitelist, possibly using the [Gorilla CSS3 scanner](http://www.gorillatoolkit.org/pkg/css/scanner) - PRs welcome so long as testing covers XSS and demonstrates safety first | ||||
| * Investigate whether devs want to blacklist elements and attributes. This would allow devs to take an existing policy (such as the `bluemonday.UGCPolicy()` ) that encapsulates 90% of what they're looking for but does more than they need, and to remove the extra things they do not want to make it 100% what they want | ||||
| * Investigate whether devs want a validating HTML mode, in which the HTML elements are not just transformed into a balanced tree (every start tag has a closing tag at the correct depth) but also that elements and character data appear only in their allowed context (i.e. that a `table` element isn't a descendent of a `caption`, that `colgroup`, `thead`, `tbody`, `tfoot` and `tr` are permitted, and that character data is not permitted) | ||||
|  | ||||
|   | ||||
7  vendor/github.com/microcosm-cc/bluemonday/go.mod  (generated, vendored)
| @@ -2,4 +2,9 @@ module github.com/microcosm-cc/bluemonday | ||||
|  | ||||
| go 1.9 | ||||
|  | ||||
| require golang.org/x/net v0.0.0-20181220203305-927f97764cc3 | ||||
| require ( | ||||
| 	github.com/aymerick/douceur v0.2.0 // indirect | ||||
| 	github.com/chris-ramon/douceur v0.2.0 | ||||
| 	github.com/gorilla/css v1.0.0 // indirect | ||||
| 	golang.org/x/net v0.0.0-20181220203305-927f97764cc3 | ||||
| ) | ||||
|   | ||||
6  vendor/github.com/microcosm-cc/bluemonday/go.sum  (generated, vendored)
| @@ -1,2 +1,8 @@ | ||||
| github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= | ||||
| github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= | ||||
| github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU= | ||||
| github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE= | ||||
| github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= | ||||
| github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= | ||||
| golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis= | ||||
| golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | ||||
|   | ||||
2085  vendor/github.com/microcosm-cc/bluemonday/handlers.go  (generated, vendored, Normal file)
File diff suppressed because it is too large
2  vendor/github.com/microcosm-cc/bluemonday/helpers.go  (generated, vendored)
| @@ -135,7 +135,7 @@ func (p *Policy) AllowStandardURLs() { | ||||
| 	// Most common URL schemes only | ||||
| 	p.AllowURLSchemes("mailto", "http", "https") | ||||
|  | ||||
| 	// For all anchors we will add rel="nofollow" if it does not already exist | ||||
| 	// For linking elements we will add rel="nofollow" if it does not already exist | ||||
| 	// This applies to "a" "area" "link" | ||||
| 	p.RequireNoFollowOnLinks(true) | ||||
| } | ||||
|   | ||||
271  vendor/github.com/microcosm-cc/bluemonday/policy.go  (generated, vendored)
| @@ -29,6 +29,8 @@ | ||||
|  | ||||
| package bluemonday | ||||
|  | ||||
| //TODO sgutzwiller create map of styles to default handlers | ||||
| //TODO sgutzwiller create handlers for various attributes | ||||
| import ( | ||||
| 	"net/url" | ||||
| 	"regexp" | ||||
| @@ -51,14 +53,22 @@ type Policy struct { | ||||
| 	// tag is replaced by a space character. | ||||
| 	addSpaces bool | ||||
|  | ||||
| 	// When true, add rel="nofollow" to HTML anchors | ||||
| 	// When true, add rel="nofollow" to HTML a, area, and link tags | ||||
| 	requireNoFollow bool | ||||
|  | ||||
| 	// When true, add rel="nofollow" to HTML anchors | ||||
| 	// When true, add rel="nofollow" to HTML a, area, and link tags | ||||
| 	// Will add for href="http://foo" | ||||
| 	// Will skip for href="/foo" or href="foo" | ||||
| 	requireNoFollowFullyQualifiedLinks bool | ||||
|  | ||||
| 	// When true, add rel="noreferrer" to HTML a, area, and link tags | ||||
| 	requireNoReferrer bool | ||||
|  | ||||
| 	// When true, add rel="noreferrer" to HTML a, area, and link tags | ||||
| 	// Will add for href="http://foo" | ||||
| 	// Will skip for href="/foo" or href="foo" | ||||
| 	requireNoReferrerFullyQualifiedLinks bool | ||||
|  | ||||
| 	// When true add target="_blank" to fully qualified links | ||||
| 	// Will add for href="http://foo" | ||||
| 	// Will skip for href="/foo" or href="foo" | ||||
| @@ -76,9 +86,21 @@ type Policy struct { | ||||
| 	// map[htmlElementName]map[htmlAttributeName]attrPolicy | ||||
| 	elsAndAttrs map[string]map[string]attrPolicy | ||||
|  | ||||
| 	// elsMatchingAndAttrs stores regex based element matches along with attributes | ||||
| 	elsMatchingAndAttrs map[*regexp.Regexp]map[string]attrPolicy | ||||
|  | ||||
| 	// map[htmlAttributeName]attrPolicy | ||||
| 	globalAttrs map[string]attrPolicy | ||||
|  | ||||
| 	// map[htmlElementName]map[cssPropertyName]stylePolicy | ||||
| 	elsAndStyles map[string]map[string]stylePolicy | ||||
|  | ||||
| 	// map[regex]map[cssPropertyName]stylePolicy | ||||
| 	elsMatchingAndStyles map[*regexp.Regexp]map[string]stylePolicy | ||||
|  | ||||
| 	// map[cssPropertyName]stylePolicy | ||||
| 	globalStyles map[string]stylePolicy | ||||
|  | ||||
| 	// If urlPolicy is nil, all URLs with matching schema are allowed. | ||||
| 	// Otherwise, only the URLs with matching schema and urlPolicy(url) | ||||
| 	// returning true are allowed. | ||||
| @@ -93,6 +115,16 @@ type Policy struct { | ||||
| 	// be maintained in the output HTML. | ||||
| 	setOfElementsAllowedWithoutAttrs map[string]struct{} | ||||
|  | ||||
| 	// If an element has had all attributes removed as a result of a policy | ||||
| 	// being applied, then the element would be removed from the output. | ||||
| 	// | ||||
| 	// However some elements are valid and have strong layout meaning without | ||||
| 	// any attributes, i.e. <table>. | ||||
| 	// | ||||
| 	// In this case, any element matching a regular expression will be accepted without | ||||
| 	// attributes added. | ||||
| 	setOfElementsMatchingAllowedWithoutAttrs []*regexp.Regexp | ||||
|  | ||||
| 	setOfElementsToSkipContent map[string]struct{} | ||||
| } | ||||
|  | ||||
| @@ -103,6 +135,20 @@ type attrPolicy struct { | ||||
| 	regexp *regexp.Regexp | ||||
| } | ||||
|  | ||||
| type stylePolicy struct { | ||||
| 	// handler to validate | ||||
| 	handler func(string) bool | ||||
|  | ||||
| 	// optional pattern to match, when not nil the regexp needs to match | ||||
| 	// otherwise the property is removed | ||||
| 	regexp *regexp.Regexp | ||||
|  | ||||
| 	// optional list of allowed property values, for properties which | ||||
| 	// have a defined list of allowed values; property will be removed | ||||
| 	// if the value is not allowed | ||||
| 	enum []string | ||||
| } | ||||
|  | ||||
| type attrPolicyBuilder struct { | ||||
| 	p *Policy | ||||
|  | ||||
| @@ -111,13 +157,26 @@ type attrPolicyBuilder struct { | ||||
| 	allowEmpty bool | ||||
| } | ||||
|  | ||||
| type stylePolicyBuilder struct { | ||||
| 	p *Policy | ||||
|  | ||||
| 	propertyNames []string | ||||
| 	regexp        *regexp.Regexp | ||||
| 	enum          []string | ||||
| 	handler       func(string) bool | ||||
| } | ||||
|  | ||||
| type urlPolicy func(url *url.URL) (allowUrl bool) | ||||
|  | ||||
| // init initializes the maps if this has not been done already | ||||
| func (p *Policy) init() { | ||||
| 	if !p.initialized { | ||||
| 		p.elsAndAttrs = make(map[string]map[string]attrPolicy) | ||||
| 		p.elsMatchingAndAttrs = make(map[*regexp.Regexp]map[string]attrPolicy) | ||||
| 		p.globalAttrs = make(map[string]attrPolicy) | ||||
| 		p.elsAndStyles = make(map[string]map[string]stylePolicy) | ||||
| 		p.elsMatchingAndStyles = make(map[*regexp.Regexp]map[string]stylePolicy) | ||||
| 		p.globalStyles = make(map[string]stylePolicy) | ||||
| 		p.allowURLSchemes = make(map[string]urlPolicy) | ||||
| 		p.setOfElementsAllowedWithoutAttrs = make(map[string]struct{}) | ||||
| 		p.setOfElementsToSkipContent = make(map[string]struct{}) | ||||
| @@ -245,6 +304,30 @@ func (abp *attrPolicyBuilder) OnElements(elements ...string) *Policy { | ||||
| 	return abp.p | ||||
| } | ||||
|  | ||||
| // OnElementsMatching will bind an attribute policy to all elements matching a given regex | ||||
| // and return the updated policy | ||||
| func (abp *attrPolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy { | ||||
| 	for _, attr := range abp.attrNames { | ||||
| 		if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok { | ||||
| 			abp.p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy) | ||||
| 		} | ||||
| 		ap := attrPolicy{} | ||||
| 		if abp.regexp != nil { | ||||
| 			ap.regexp = abp.regexp | ||||
| 		} | ||||
| 		abp.p.elsMatchingAndAttrs[regex][attr] = ap | ||||
| 	} | ||||
|  | ||||
| 	if abp.allowEmpty { | ||||
| 		abp.p.setOfElementsMatchingAllowedWithoutAttrs = append(abp.p.setOfElementsMatchingAllowedWithoutAttrs, regex) | ||||
| 		if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok { | ||||
| 			abp.p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return abp.p | ||||
| } | ||||
|  | ||||
| // Globally will bind an attribute policy to all HTML elements and return the | ||||
| // updated policy | ||||
| func (abp *attrPolicyBuilder) Globally() *Policy { | ||||
| @@ -265,6 +348,139 @@ func (abp *attrPolicyBuilder) Globally() *Policy { | ||||
| 	return abp.p | ||||
| } | ||||
|  | ||||
| // AllowStyles takes a range of CSS property names and returns a | ||||
| // style policy builder that allows you to specify the pattern and scope of | ||||
| // the whitelisted property. | ||||
| // | ||||
| // The style policy is only added to the core policy when either Globally() | ||||
| // or OnElements(...) are called. | ||||
| func (p *Policy) AllowStyles(propertyNames ...string) *stylePolicyBuilder { | ||||
|  | ||||
| 	p.init() | ||||
|  | ||||
| 	abp := stylePolicyBuilder{ | ||||
| 		p: p, | ||||
| 	} | ||||
|  | ||||
| 	for _, propertyName := range propertyNames { | ||||
| 		abp.propertyNames = append(abp.propertyNames, strings.ToLower(propertyName)) | ||||
| 	} | ||||
|  | ||||
| 	return &abp | ||||
| } | ||||
|  | ||||
| // Matching allows a regular expression to be applied to a nascent style | ||||
| // policy, and returns the style policy. Calling this more than once will | ||||
| // replace the existing regexp. | ||||
| func (spb *stylePolicyBuilder) Matching(regex *regexp.Regexp) *stylePolicyBuilder { | ||||
|  | ||||
| 	spb.regexp = regex | ||||
|  | ||||
| 	return spb | ||||
| } | ||||
|  | ||||
| // MatchingEnum allows a list of allowed values to be applied to a nascent style | ||||
| // policy, and returns the style policy. Calling this more than once will | ||||
| // replace the existing list of allowed values. | ||||
| func (spb *stylePolicyBuilder) MatchingEnum(enum ...string) *stylePolicyBuilder { | ||||
|  | ||||
| 	spb.enum = enum | ||||
|  | ||||
| 	return spb | ||||
| } | ||||
|  | ||||
| // MatchingHandler allows a handler to be applied to a nascent style | ||||
| // policy, and returns the style policy. Calling this more than once will | ||||
| // replace the existing handler. | ||||
| func (spb *stylePolicyBuilder) MatchingHandler(handler func(string) bool) *stylePolicyBuilder { | ||||
|  | ||||
| 	spb.handler = handler | ||||
|  | ||||
| 	return spb | ||||
| } | ||||
|  | ||||
| // OnElements will bind a style policy to a given range of HTML elements | ||||
| // and return the updated policy | ||||
| func (spb *stylePolicyBuilder) OnElements(elements ...string) *Policy { | ||||
|  | ||||
| 	for _, element := range elements { | ||||
| 		element = strings.ToLower(element) | ||||
|  | ||||
| 		for _, attr := range spb.propertyNames { | ||||
|  | ||||
| 			if _, ok := spb.p.elsAndStyles[element]; !ok { | ||||
| 				spb.p.elsAndStyles[element] = make(map[string]stylePolicy) | ||||
| 			} | ||||
|  | ||||
| 			sp := stylePolicy{} | ||||
| 			if spb.handler != nil { | ||||
| 				sp.handler = spb.handler | ||||
| 			} else if len(spb.enum) > 0 { | ||||
| 				sp.enum = spb.enum | ||||
| 			} else if spb.regexp != nil { | ||||
| 				sp.regexp = spb.regexp | ||||
| 			} else { | ||||
| 				sp.handler = getDefaultHandler(attr) | ||||
| 			} | ||||
| 			spb.p.elsAndStyles[element][attr] = sp | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return spb.p | ||||
| } | ||||
|  | ||||
| // OnElementsMatching will bind a style policy to any HTML elements matching the pattern | ||||
| // and return the updated policy | ||||
| func (spb *stylePolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy { | ||||
|  | ||||
| 		for _, attr := range spb.propertyNames { | ||||
|  | ||||
| 			if _, ok := spb.p.elsMatchingAndStyles[regex]; !ok { | ||||
| 				spb.p.elsMatchingAndStyles[regex] = make(map[string]stylePolicy) | ||||
| 			} | ||||
|  | ||||
| 			sp := stylePolicy{} | ||||
| 			if spb.handler != nil { | ||||
| 				sp.handler = spb.handler | ||||
| 			} else if len(spb.enum) > 0 { | ||||
| 				sp.enum = spb.enum | ||||
| 			} else if spb.regexp != nil { | ||||
| 				sp.regexp = spb.regexp | ||||
| 			} else { | ||||
| 				sp.handler = getDefaultHandler(attr) | ||||
| 			} | ||||
| 			spb.p.elsMatchingAndStyles[regex][attr] = sp | ||||
| 		} | ||||
|  | ||||
| 	return spb.p | ||||
| } | ||||
|  | ||||
| // Globally will bind a style policy to all HTML elements and return the | ||||
| // updated policy | ||||
| func (spb *stylePolicyBuilder) Globally() *Policy { | ||||
|  | ||||
| 	for _, attr := range spb.propertyNames { | ||||
| 		if _, ok := spb.p.globalStyles[attr]; !ok { | ||||
| 			spb.p.globalStyles[attr] = stylePolicy{} | ||||
| 		} | ||||
|  | ||||
| 		// Use only one strategy for validating styles, fallback to default | ||||
| 		sp := stylePolicy{} | ||||
| 		if spb.handler != nil { | ||||
| 			sp.handler = spb.handler | ||||
| 		} else if len(spb.enum) > 0 { | ||||
| 			sp.enum = spb.enum | ||||
| 		} else if spb.regexp != nil { | ||||
| 			sp.regexp = spb.regexp | ||||
| 		} else { | ||||
| 			sp.handler = getDefaultHandler(attr) | ||||
| 		} | ||||
| 		spb.p.globalStyles[attr] = sp | ||||
| 	} | ||||
|  | ||||
| 	return spb.p | ||||
| } | ||||
|  | ||||
| // AllowElements will append HTML elements to the whitelist without applying an | ||||
| // attribute policy to those elements (the elements are permitted | ||||
| // sans-attributes) | ||||
| @@ -282,8 +498,16 @@ func (p *Policy) AllowElements(names ...string) *Policy { | ||||
| 	return p | ||||
| } | ||||
|  | ||||
| // RequireNoFollowOnLinks will result in all <a> tags having a rel="nofollow" | ||||
| // added to them if one does not already exist | ||||
| func (p *Policy) AllowElementsMatching(regex *regexp.Regexp) *Policy { | ||||
| 	p.init() | ||||
| 	if _, ok := p.elsMatchingAndAttrs[regex]; !ok { | ||||
| 		p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy) | ||||
| 	} | ||||
| 	return p | ||||
| } | ||||
|  | ||||
| // RequireNoFollowOnLinks will result in all a, area, link tags having a | ||||
| // rel="nofollow"added to them if one does not already exist | ||||
| // | ||||
| // Note: This requires p.RequireParseableURLs(true) and will enable it. | ||||
| func (p *Policy) RequireNoFollowOnLinks(require bool) *Policy { | ||||
| @@ -294,9 +518,10 @@ func (p *Policy) RequireNoFollowOnLinks(require bool) *Policy { | ||||
| 	return p | ||||
| } | ||||
|  | ||||
| // RequireNoFollowOnFullyQualifiedLinks will result in all <a> tags that point | ||||
| // to a non-local destination (i.e. starts with a protocol and has a host) | ||||
| // having a rel="nofollow" added to them if one does not already exist | ||||
| // RequireNoFollowOnFullyQualifiedLinks will result in all a, area, and link | ||||
| // tags that point to a non-local destination (i.e. starts with a protocol and | ||||
| // has a host) having a rel="nofollow" added to them if one does not already | ||||
| // exist | ||||
| // | ||||
| // Note: This requires p.RequireParseableURLs(true) and will enable it. | ||||
| func (p *Policy) RequireNoFollowOnFullyQualifiedLinks(require bool) *Policy { | ||||
| @@ -307,9 +532,35 @@ func (p *Policy) RequireNoFollowOnFullyQualifiedLinks(require bool) *Policy { | ||||
| 	return p | ||||
| } | ||||
|  | ||||
| // AddTargetBlankToFullyQualifiedLinks will result in all <a> tags that point | ||||
| // to a non-local destination (i.e. starts with a protocol and has a host) | ||||
| // having a target="_blank" added to them if one does not already exist | ||||
| // RequireNoReferrerOnLinks will result in all a, area, and link tags having a | ||||
| // rel="noreferrrer" added to them if one does not already exist | ||||
| // | ||||
| // Note: This requires p.RequireParseableURLs(true) and will enable it. | ||||
| func (p *Policy) RequireNoReferrerOnLinks(require bool) *Policy { | ||||
|  | ||||
| 	p.requireNoReferrer = require | ||||
| 	p.requireParseableURLs = true | ||||
|  | ||||
| 	return p | ||||
| } | ||||
|  | ||||
| // RequireNoReferrerOnFullyQualifiedLinks will result in all a, area, and link | ||||
| // tags that point to a non-local destination (i.e. starts with a protocol and | ||||
| // has a host) having a rel="noreferrer" added to them if one does not already | ||||
| // exist | ||||
| // | ||||
| // Note: This requires p.RequireParseableURLs(true) and will enable it. | ||||
| func (p *Policy) RequireNoReferrerOnFullyQualifiedLinks(require bool) *Policy { | ||||
|  | ||||
| 	p.requireNoReferrerFullyQualifiedLinks = require | ||||
| 	p.requireParseableURLs = true | ||||
|  | ||||
| 	return p | ||||
| } | ||||
|  | ||||
| // AddTargetBlankToFullyQualifiedLinks will result in all a, area and link tags | ||||
| // that point to a non-local destination (i.e. starts with a protocol and has a | ||||
| // host) having a target="_blank" added to them if one does not already exist | ||||
| // | ||||
| // Note: This requires p.RequireParseableURLs(true) and will enable it. | ||||
| func (p *Policy) AddTargetBlankToFullyQualifiedLinks(require bool) *Policy { | ||||
|   | ||||
344  vendor/github.com/microcosm-cc/bluemonday/sanitize.go  (generated, vendored)
| @@ -34,15 +34,19 @@ import ( | ||||
| 	"io" | ||||
| 	"net/url" | ||||
| 	"regexp" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
|  | ||||
| 	"golang.org/x/net/html" | ||||
|  | ||||
| 	cssparser "github.com/chris-ramon/douceur/parser" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	dataAttribute             = regexp.MustCompile("^data-.+") | ||||
| 	dataAttributeXMLPrefix    = regexp.MustCompile("^xml.+") | ||||
| 	dataAttributeInvalidChars = regexp.MustCompile("[A-Z;]+") | ||||
| 	cssUnicodeChar            = regexp.MustCompile(`\\[0-9a-f]{1,6} ?`) | ||||
| ) | ||||
|  | ||||
| // Sanitize takes a string that contains a HTML fragment or document and applies | ||||
| @@ -82,6 +86,98 @@ func (p *Policy) SanitizeReader(r io.Reader) *bytes.Buffer { | ||||
| 	return p.sanitize(r) | ||||
| } | ||||
|  | ||||
| const escapedURLChars = "'<>\"\r" | ||||
|  | ||||
| func escapeUrlComponent(val string) string { | ||||
| 	w := bytes.NewBufferString("") | ||||
| 	i := strings.IndexAny(val, escapedURLChars) | ||||
| 	for i != -1 { | ||||
| 		if _, err := w.WriteString(val[:i]); err != nil { | ||||
| 			return w.String() | ||||
| 		} | ||||
| 		var esc string | ||||
| 		switch val[i] { | ||||
| 		case '\'': | ||||
| 			// "&#39;" is shorter than "&apos;" and apos was not in HTML until HTML5. | ||||
| 			esc = "&#39;" | ||||
| 		case '<': | ||||
| 			esc = "&lt;" | ||||
| 		case '>': | ||||
| 			esc = "&gt;" | ||||
| 		case '"': | ||||
| 			// "&#34;" is shorter than "&quot;". | ||||
| 			esc = "&#34;" | ||||
| 		case '\r': | ||||
| 			esc = "&#13;" | ||||
| 		default: | ||||
| 			panic("unrecognized escape character") | ||||
| 		} | ||||
| 		val = val[i+1:] | ||||
| 		if _, err := w.WriteString(esc); err != nil { | ||||
| 			return w.String() | ||||
| 		} | ||||
| 		i = strings.IndexAny(val, escapedURLChars) | ||||
| 	} | ||||
| 	w.WriteString(val) | ||||
| 	return w.String() | ||||
| } | ||||
|  | ||||
| func sanitizedUrl(val string) (string, error) { | ||||
| 	u, err := url.Parse(val) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
| 	// sanitize the url query params | ||||
| 	sanitizedQueryValues := make(url.Values, 0) | ||||
| 	queryValues := u.Query() | ||||
| 	for k, vals := range queryValues { | ||||
| 		sk := html.EscapeString(k) | ||||
| 		for _, v := range vals { | ||||
| 			sv := v | ||||
| 			sanitizedQueryValues.Add(sk, sv) | ||||
| 		} | ||||
| 	} | ||||
| 	u.RawQuery = sanitizedQueryValues.Encode() | ||||
| 	// u.String() will also sanitize host/scheme/user/pass | ||||
| 	return u.String(), nil | ||||
| } | ||||
|  | ||||
| func (p *Policy) writeLinkableBuf(buff *bytes.Buffer, token *html.Token) { | ||||
| 	// do not escape multiple query parameters | ||||
| 	tokenBuff := bytes.NewBufferString("") | ||||
| 	tokenBuff.WriteString("<") | ||||
| 	tokenBuff.WriteString(token.Data) | ||||
| 	for _, attr := range token.Attr { | ||||
| 		tokenBuff.WriteByte(' ') | ||||
| 		tokenBuff.WriteString(attr.Key) | ||||
| 		tokenBuff.WriteString(`="`) | ||||
| 		switch attr.Key { | ||||
| 		case "href", "src": | ||||
| 			u, ok := p.validURL(attr.Val) | ||||
| 			if !ok { | ||||
| 				tokenBuff.WriteString(html.EscapeString(attr.Val)) | ||||
| 				continue | ||||
| 			} | ||||
| 			u, err := sanitizedUrl(u) | ||||
| 			if err == nil { | ||||
| 				tokenBuff.WriteString(u) | ||||
| 			} else { | ||||
| 				// fallthrough | ||||
| 				tokenBuff.WriteString(html.EscapeString(attr.Val)) | ||||
| 			} | ||||
| 		default: | ||||
| 			// re-apply | ||||
| 			tokenBuff.WriteString(html.EscapeString(attr.Val)) | ||||
| 		} | ||||
| 		tokenBuff.WriteByte('"') | ||||
| 	} | ||||
| 	if token.Type == html.SelfClosingTagToken { | ||||
| 		tokenBuff.WriteString("/") | ||||
| 	} | ||||
| 	tokenBuff.WriteString(">") | ||||
| 	buff.WriteString(tokenBuff.String()) | ||||
| } | ||||
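
The "do not escape multiple query parameters" comment is the crux of `writeLinkableBuf` and `sanitizedUrl` above: hrefs are re-encoded with `net/url` so each parameter is escaped individually, rather than HTML-escaping the whole attribute value and mangling the `&` between parameters. A standalone sketch of the same idea using only the standard library (the URL is an invented example):

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Parse the href, then re-encode only the query portion so each
	// key/value pair is percent-escaped on its own.
	u, err := url.Parse("https://example.com/search?q=<b>&page=2")
	if err != nil {
		return
	}
	u.RawQuery = u.Query().Encode()
	fmt.Println(u.String()) // https://example.com/search?page=2&q=%3Cb%3E
}
```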
|  | ||||
| // Performs the actual sanitization process. | ||||
| func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
|  | ||||
| @@ -133,10 +229,12 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
|  | ||||
| 		case html.StartTagToken: | ||||
|  | ||||
| 			mostRecentlyStartedToken = token.Data | ||||
| 			mostRecentlyStartedToken = strings.ToLower(token.Data) | ||||
|  | ||||
| 			aps, ok := p.elsAndAttrs[token.Data] | ||||
| 			if !ok { | ||||
| 				aa, matched := p.matchRegex(token.Data) | ||||
| 				if !matched { | ||||
| 					if _, ok := p.setOfElementsToSkipContent[token.Data]; ok { | ||||
| 						skipElementContent = true | ||||
| 						skippingElementsCount++ | ||||
| @@ -146,7 +244,8 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
| 					} | ||||
| 					break | ||||
| 				} | ||||
|  | ||||
| 				aps = aa | ||||
| 			} | ||||
| 			if len(token.Attr) != 0 { | ||||
| 				token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps) | ||||
| 			} | ||||
| @@ -163,12 +262,17 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
| 			} | ||||
|  | ||||
| 			if !skipElementContent { | ||||
| 				// do not escape multiple query parameters | ||||
| 				if linkable(token.Data) { | ||||
| 					p.writeLinkableBuf(&buff, &token) | ||||
| 				} else { | ||||
| 					buff.WriteString(token.String()) | ||||
| 				} | ||||
| 			} | ||||
|  | ||||
| 		case html.EndTagToken: | ||||
|  | ||||
| 			if mostRecentlyStartedToken == token.Data { | ||||
| 			if mostRecentlyStartedToken == strings.ToLower(token.Data) { | ||||
| 				mostRecentlyStartedToken = "" | ||||
| 			} | ||||
|  | ||||
| @@ -182,19 +286,28 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
| 				} | ||||
| 				break | ||||
| 			} | ||||
|  | ||||
| 			if _, ok := p.elsAndAttrs[token.Data]; !ok { | ||||
| 				if _, ok := p.setOfElementsToSkipContent[token.Data]; ok { | ||||
| 				match := false | ||||
| 				for regex := range p.elsMatchingAndAttrs { | ||||
| 					if regex.MatchString(token.Data) { | ||||
| 						skipElementContent = false | ||||
| 						match = true | ||||
| 						break | ||||
| 					} | ||||
| 				} | ||||
| 				if _, ok := p.setOfElementsToSkipContent[token.Data]; ok && !match { | ||||
| 					skippingElementsCount-- | ||||
| 					if skippingElementsCount == 0 { | ||||
| 						skipElementContent = false | ||||
| 					} | ||||
| 				} | ||||
| 				if !match { | ||||
| 					if p.addSpaces { | ||||
| 						buff.WriteString(" ") | ||||
| 					} | ||||
| 					break | ||||
| 				} | ||||
| 			} | ||||
|  | ||||
| 			if !skipElementContent { | ||||
| 				buff.WriteString(token.String()) | ||||
| @@ -204,11 +317,15 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
|  | ||||
| 			aps, ok := p.elsAndAttrs[token.Data] | ||||
| 			if !ok { | ||||
| 				if p.addSpaces { | ||||
| 				aa, matched := p.matchRegex(token.Data) | ||||
| 				if !matched { | ||||
| 					if p.addSpaces && !matched { | ||||
| 						buff.WriteString(" ") | ||||
| 					} | ||||
| 					break | ||||
| 				} | ||||
| 				aps = aa | ||||
| 			} | ||||
|  | ||||
| 			if len(token.Attr) != 0 { | ||||
| 				token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps) | ||||
| @@ -217,13 +334,17 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
| 			if len(token.Attr) == 0 && !p.allowNoAttrs(token.Data) { | ||||
| 				if p.addSpaces { | ||||
| 					buff.WriteString(" ") | ||||
| 				} | ||||
| 					break | ||||
| 				} | ||||
|  | ||||
| 			} | ||||
| 			if !skipElementContent { | ||||
| 				// do not escape multiple query parameters | ||||
| 				if linkable(token.Data) { | ||||
| 					p.writeLinkableBuf(&buff, &token) | ||||
| 				} else { | ||||
| 					buff.WriteString(token.String()) | ||||
| 				} | ||||
| 			} | ||||
|  | ||||
| 		case html.TextToken: | ||||
|  | ||||
| @@ -242,6 +363,7 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer { | ||||
| 					buff.WriteString(token.String()) | ||||
| 				} | ||||
| 			} | ||||
|  | ||||
| 		default: | ||||
| 			// A token that didn't exist in the html package when we wrote this | ||||
| 			return &bytes.Buffer{} | ||||
| @@ -262,6 +384,23 @@ func (p *Policy) sanitizeAttrs( | ||||
| 		return attrs | ||||
| 	} | ||||
|  | ||||
| 	hasStylePolicies := false | ||||
| 	sps, elementHasStylePolicies := p.elsAndStyles[elementName] | ||||
| 	if len(p.globalStyles) > 0 || (elementHasStylePolicies && len(sps) > 0) { | ||||
| 		hasStylePolicies = true | ||||
| 	} | ||||
| 	// no specific element policy found, look for a pattern match | ||||
| 	if !hasStylePolicies { | ||||
| 		for k, v := range p.elsMatchingAndStyles { | ||||
| 			if k.MatchString(elementName) { | ||||
| 				if len(v) > 0 { | ||||
| 					hasStylePolicies = true | ||||
| 					break | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	// Builds a new attribute slice based on the whether the attribute has been | ||||
| 	// whitelisted explicitly or globally. | ||||
| 	cleanAttrs := []html.Attribute{} | ||||
| @@ -273,6 +412,19 @@ func (p *Policy) sanitizeAttrs( | ||||
| 				continue | ||||
| 			} | ||||
| 		} | ||||
| 		// Is this a "style" attribute, and if so, do we need to sanitize it? | ||||
| 		if htmlAttr.Key == "style" && hasStylePolicies { | ||||
| 			htmlAttr = p.sanitizeStyles(htmlAttr, elementName) | ||||
| 			if htmlAttr.Val == "" { | ||||
| 				// We've sanitized away any and all styles; don't bother to | ||||
| 				// output the style attribute (even if it's allowed) | ||||
| 				continue | ||||
| 			} else { | ||||
| 				cleanAttrs = append(cleanAttrs, htmlAttr) | ||||
| 				continue | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		// Is there an element specific attribute policy that applies? | ||||
| 		if ap, ok := aps[htmlAttr.Key]; ok { | ||||
| 			if ap.regexp != nil { | ||||
| @@ -354,6 +506,8 @@ func (p *Policy) sanitizeAttrs( | ||||
|  | ||||
| 		if (p.requireNoFollow || | ||||
| 			p.requireNoFollowFullyQualifiedLinks || | ||||
| 			p.requireNoReferrer || | ||||
| 			p.requireNoReferrerFullyQualifiedLinks || | ||||
| 			p.addTargetBlankToFullyQualifiedLinks) && | ||||
| 			len(cleanAttrs) > 0 { | ||||
|  | ||||
| @@ -381,12 +535,16 @@ func (p *Policy) sanitizeAttrs( | ||||
| 				if hrefFound { | ||||
| 					var ( | ||||
| 						noFollowFound    bool | ||||
| 						noReferrerFound  bool | ||||
| 						targetBlankFound bool | ||||
| 					) | ||||
|  | ||||
| 					addNoFollow := (p.requireNoFollow || | ||||
| 						externalLink && p.requireNoFollowFullyQualifiedLinks) | ||||
|  | ||||
| 					addNoReferrer := (p.requireNoReferrer || | ||||
| 						externalLink && p.requireNoReferrerFullyQualifiedLinks) | ||||
|  | ||||
| 					addTargetBlank := (externalLink && | ||||
| 						p.addTargetBlankToFullyQualifiedLinks) | ||||
|  | ||||
| @@ -394,19 +552,19 @@ func (p *Policy) sanitizeAttrs( | ||||
| 					for _, htmlAttr := range cleanAttrs { | ||||
|  | ||||
| 						var appended bool | ||||
| 						if htmlAttr.Key == "rel" && addNoFollow { | ||||
| 						if htmlAttr.Key == "rel" && (addNoFollow || addNoReferrer) { | ||||
|  | ||||
| 							if strings.Contains(htmlAttr.Val, "nofollow") { | ||||
| 								noFollowFound = true | ||||
| 								tmpAttrs = append(tmpAttrs, htmlAttr) | ||||
| 								appended = true | ||||
| 							} else { | ||||
| 							if addNoFollow && !strings.Contains(htmlAttr.Val, "nofollow") { | ||||
| 								htmlAttr.Val += " nofollow" | ||||
| 								noFollowFound = true | ||||
| 							} | ||||
| 							if addNoReferrer && !strings.Contains(htmlAttr.Val, "noreferrer") { | ||||
| 								htmlAttr.Val += " noreferrer" | ||||
| 							} | ||||
| 							noFollowFound = addNoFollow | ||||
| 							noReferrerFound = addNoReferrer | ||||
| 							tmpAttrs = append(tmpAttrs, htmlAttr) | ||||
| 							appended = true | ||||
| 						} | ||||
| 						} | ||||
|  | ||||
| 						if elementName == "a" && htmlAttr.Key == "target" { | ||||
| 							if htmlAttr.Val == "_blank" { | ||||
| @@ -424,14 +582,22 @@ func (p *Policy) sanitizeAttrs( | ||||
| 							tmpAttrs = append(tmpAttrs, htmlAttr) | ||||
| 						} | ||||
| 					} | ||||
| 					if noFollowFound || targetBlankFound { | ||||
| 					if noFollowFound || noReferrerFound || targetBlankFound { | ||||
| 						cleanAttrs = tmpAttrs | ||||
| 					} | ||||
|  | ||||
| 					if addNoFollow && !noFollowFound { | ||||
| 					if (addNoFollow && !noFollowFound) || (addNoReferrer && !noReferrerFound) { | ||||
| 						rel := html.Attribute{} | ||||
| 						rel.Key = "rel" | ||||
| 						if addNoFollow { | ||||
| 							rel.Val = "nofollow" | ||||
| 						} | ||||
| 						if addNoReferrer { | ||||
| 							if rel.Val != "" { | ||||
| 								rel.Val += " " | ||||
| 							} | ||||
| 							rel.Val += "noreferrer" | ||||
| 						} | ||||
| 						cleanAttrs = append(cleanAttrs, rel) | ||||
| 					} | ||||
|  | ||||
| @@ -501,8 +667,95 @@ func (p *Policy) sanitizeAttrs( | ||||
| 	return cleanAttrs | ||||
| } | ||||
|  | ||||
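| // sanitizeStyles applies the configured style policies to a style attribute, | ||||
| // dropping every CSS declaration that no element, pattern, or global policy allows. | ||||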
| func (p *Policy) sanitizeStyles(attr html.Attribute, elementName string) html.Attribute { | ||||
| 	sps := p.elsAndStyles[elementName] | ||||
| 	if len(sps) == 0 { | ||||
| 		sps = map[string]stylePolicy{} | ||||
| 		// No exact element policy was found, so check the pattern matchers. | ||||
| 		// If multiple matchers match, their policies overwrite one another, so it | ||||
| 		// is best not to have overlapping matchers. | ||||
| 		for regex, policies := range p.elsMatchingAndStyles { | ||||
| 			if regex.MatchString(elementName) { | ||||
| 				for k, v := range policies { | ||||
| 					sps[k] = v | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	// Add a trailing semicolon so the parser handles the final declaration | ||||
| 	if len(attr.Val) > 0 && attr.Val[len(attr.Val)-1] != ';' { | ||||
| 		attr.Val = attr.Val + ";" | ||||
| 	} | ||||
| 	decs, err := cssparser.ParseDeclarations(attr.Val) | ||||
| 	if err != nil { | ||||
| 		attr.Val = "" | ||||
| 		return attr | ||||
| 	} | ||||
| 	clean := []string{} | ||||
| 	prefixes := []string{"-webkit-", "-moz-", "-ms-", "-o-", "mso-", "-xv-", "-atsc-", "-wap-", "-khtml-", "prince-", "-ah-", "-hp-", "-ro-", "-rim-", "-tc-"} | ||||
|  | ||||
| 	for _, dec := range decs { | ||||
| 		addedProperty := false | ||||
| 		tempProperty := strings.ToLower(dec.Property) | ||||
| 		tempValue := removeUnicode(strings.ToLower(dec.Value)) | ||||
| 		for _, i := range prefixes { | ||||
| 			tempProperty = strings.TrimPrefix(tempProperty, i) | ||||
| 		} | ||||
| 		if sp, ok := sps[tempProperty]; ok { | ||||
| 			if sp.handler != nil { | ||||
| 				if sp.handler(tempValue) { | ||||
| 					clean = append(clean, dec.Property+": "+dec.Value) | ||||
| 					addedProperty = true | ||||
| 				} | ||||
| 			} else if len(sp.enum) > 0 { | ||||
| 				if stringInSlice(tempValue, sp.enum) { | ||||
| 					clean = append(clean, dec.Property+": "+dec.Value) | ||||
| 					addedProperty = true | ||||
| 				} | ||||
| 			} else if sp.regexp != nil { | ||||
| 				if sp.regexp.MatchString(tempValue) { | ||||
| 					clean = append(clean, dec.Property+": "+dec.Value) | ||||
| 					addedProperty = true | ||||
| 				} | ||||
| 				continue | ||||
| 			} | ||||
| 		} | ||||
| 		if sp, ok := p.globalStyles[tempProperty]; ok && !addedProperty { | ||||
| 			if sp.handler != nil { | ||||
| 				if sp.handler(tempValue) { | ||||
| 					clean = append(clean, dec.Property+": "+dec.Value) | ||||
| 				} | ||||
| 			} else if len(sp.enum) > 0 { | ||||
| 				if stringInSlice(tempValue, sp.enum) { | ||||
| 					clean = append(clean, dec.Property+": "+dec.Value) | ||||
| 				} | ||||
| 			} else if sp.regexp != nil { | ||||
| 				if sp.regexp.MatchString(tempValue) { | ||||
| 					clean = append(clean, dec.Property+": "+dec.Value) | ||||
| 				} | ||||
| 				continue | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	if len(clean) > 0 { | ||||
| 		attr.Val = strings.Join(clean, "; ") | ||||
| 	} else { | ||||
| 		attr.Val = "" | ||||
| 	} | ||||
| 	return attr | ||||
| } | ||||
|  | ||||
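| // allowNoAttrs reports whether elementName may be emitted without any attributes. | ||||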
| func (p *Policy) allowNoAttrs(elementName string) bool { | ||||
| 	_, ok := p.setOfElementsAllowedWithoutAttrs[elementName] | ||||
| 	if !ok { | ||||
| 		for _, r := range p.setOfElementsMatchingAllowedWithoutAttrs { | ||||
| 			if r.MatchString(elementName) { | ||||
| 				ok = true | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	return ok | ||||
| } | ||||
|  | ||||
| @@ -561,6 +814,16 @@ func linkable(elementName string) bool { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // stringInSlice returns true if needle exists in haystack | ||||
| func stringInSlice(needle string, haystack []string) bool { | ||||
| 	for _, straw := range haystack { | ||||
| 		if strings.ToLower(straw) == strings.ToLower(needle) { | ||||
| 			return true | ||||
| 		} | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| func isDataAttribute(val string) bool { | ||||
| 	if !dataAttribute.MatchString(val) { | ||||
| 		return false | ||||
| @@ -579,3 +842,48 @@ func isDataAttribute(val string) bool { | ||||
| 	} | ||||
| 	return true | ||||
| } | ||||
|  | ||||
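| // removeUnicode decodes CSS unicode escape sequences (e.g. `\0070`) in a value | ||||
| // so that the subsequent policy checks see the literal characters. | ||||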
| func removeUnicode(value string) string { | ||||
| 	substitutedValue := value | ||||
| 	currentLoc := cssUnicodeChar.FindStringIndex(substitutedValue) | ||||
| 	for currentLoc != nil { | ||||
|  | ||||
| 		character := substitutedValue[currentLoc[0]+1 : currentLoc[1]] | ||||
| 		character = strings.TrimSpace(character) | ||||
| 		if len(character) < 4 { | ||||
| 			character = strings.Repeat("0", 4-len(character)) + character | ||||
| 		} else { | ||||
| 			for len(character) > 4 { | ||||
| 				if character[0] != '0' { | ||||
| 					character = "" | ||||
| 					break | ||||
| 				} else { | ||||
| 					character = character[1:] | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 		character = "\\u" + character | ||||
| 		translatedChar, err := strconv.Unquote(`"` + character + `"`) | ||||
| 		translatedChar = strings.TrimSpace(translatedChar) | ||||
| 		if err != nil { | ||||
| 			return "" | ||||
| 		} | ||||
| 		substitutedValue = substitutedValue[0:currentLoc[0]] + translatedChar + substitutedValue[currentLoc[1]:] | ||||
| 		currentLoc = cssUnicodeChar.FindStringIndex(substitutedValue) | ||||
| 	} | ||||
| 	return substitutedValue | ||||
| } | ||||
|  | ||||
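| // matchRegex merges the attribute policies of every regex element matcher that | ||||
| // matches elementName and reports whether any matcher matched. | ||||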
| func (p *Policy) matchRegex(elementName string) (map[string]attrPolicy, bool) { | ||||
| 	aps := make(map[string]attrPolicy, 0) | ||||
| 	matched := false | ||||
| 	for regex, attrs := range p.elsMatchingAndAttrs { | ||||
| 		if regex.MatchString(elementName) { | ||||
| 			matched = true | ||||
| 			for k, v := range attrs { | ||||
| 				aps[k] = v | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	return aps, matched | ||||
| } | ||||
|   | ||||
							
								
								
									
11  vendor/github.com/muesli/reflow/ansi/buffer.go  (generated, vendored)
							| @@ -11,11 +11,16 @@ type Buffer struct { | ||||
| 	bytes.Buffer | ||||
| } | ||||
|  | ||||
| // PrintableRuneCount returns the amount of printable runes in the buffer. | ||||
| func (w Buffer) PrintableRuneCount() int { | ||||
| // PrintableRuneWidth returns the width of all printable runes in the buffer. | ||||
| func (w Buffer) PrintableRuneWidth() int { | ||||
| 	return PrintableRuneWidth(w.String()) | ||||
| } | ||||
|  | ||||
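| // PrintableRuneWidth returns the width of all printable runes in s, | ||||
| // ignoring ANSI escape sequences. | ||||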
| func PrintableRuneWidth(s string) int { | ||||
| 	var n int | ||||
| 	var ansi bool | ||||
| 	for _, c := range w.String() { | ||||
|  | ||||
| 	for _, c := range s { | ||||
| 		if c == '\x1B' { | ||||
| 			// ANSI escape sequence | ||||
| 			ansi = true | ||||
|   | ||||
							
								
								
									
6  vendor/github.com/muesli/reflow/wordwrap/wordwrap.go  (generated, vendored)
							| @@ -66,7 +66,7 @@ func (w *WordWrap) addSpace() { | ||||
| func (w *WordWrap) addWord() { | ||||
| 	if w.word.Len() > 0 { | ||||
| 		w.addSpace() | ||||
| 		w.lineLen += w.word.PrintableRuneCount() | ||||
| 		w.lineLen += w.word.PrintableRuneWidth() | ||||
| 		w.buf.Write(w.word.Bytes()) | ||||
| 		w.word.Reset() | ||||
| 	} | ||||
| @@ -139,8 +139,8 @@ func (w *WordWrap) Write(b []byte) (int, error) { | ||||
|  | ||||
| 			// add a line break if the current word would exceed the line's | ||||
| 			// character limit | ||||
| 			if w.lineLen+w.space.Len()+w.word.PrintableRuneCount() > w.Limit && | ||||
| 				w.word.PrintableRuneCount() < w.Limit { | ||||
| 			if w.lineLen+w.space.Len()+w.word.PrintableRuneWidth() > w.Limit && | ||||
| 				w.word.PrintableRuneWidth() < w.Limit { | ||||
| 				w.addNewLine() | ||||
| 			} | ||||
| 		} | ||||
|   | ||||
							
								
								
									
15  vendor/github.com/yuin/goldmark-emoji/.gitignore  (generated, vendored, normal file)
							| @@ -0,0 +1,15 @@ | ||||
| # Binaries for programs and plugins | ||||
| *.exe | ||||
| *.exe~ | ||||
| *.dll | ||||
| *.so | ||||
| *.dylib | ||||
|  | ||||
| # Test binary, build with `go test -c` | ||||
| *.test | ||||
| *.pprof | ||||
|  | ||||
| # Output of the go coverage tool, specifically when used with LiteIDE | ||||
| *.out | ||||
|  | ||||
| .DS_Store | ||||
							
								
								
									
21  vendor/github.com/yuin/goldmark-emoji/LICENSE  (generated, vendored, normal file)
							| @@ -0,0 +1,21 @@ | ||||
| MIT License | ||||
|  | ||||
| Copyright (c) 2020 Yusuke Inuzuka | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| of this software and associated documentation files (the "Software"), to deal | ||||
| in the Software without restriction, including without limitation the rights | ||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| copies of the Software, and to permit persons to whom the Software is | ||||
| furnished to do so, subject to the following conditions: | ||||
|  | ||||
| The above copyright notice and this permission notice shall be included in all | ||||
| copies or substantial portions of the Software. | ||||
|  | ||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||||
| SOFTWARE. | ||||
							
								
								
									
71  vendor/github.com/yuin/goldmark-emoji/README.md  (generated, vendored, normal file)
							| @@ -0,0 +1,71 @@ | ||||
| goldmark-emoji | ||||
| ========================= | ||||
|  | ||||
| [![GoDev][godev-image]][godev-url] | ||||
|  | ||||
| [godev-image]: https://pkg.go.dev/badge/github.com/yuin/goldmark-emoji | ||||
| [godev-url]: https://pkg.go.dev/github.com/yuin/goldmark-emoji | ||||
|  | ||||
| goldmark-emoji is an extension for [goldmark](http://github.com/yuin/goldmark)  | ||||
| that parses `:joy:` style emojis. | ||||
|  | ||||
| Installation | ||||
| -------------------- | ||||
|  | ||||
| ``` | ||||
| go get github.com/yuin/goldmark-emoji | ||||
| ``` | ||||
|  | ||||
| Usage | ||||
| -------------------- | ||||
|  | ||||
| ```go | ||||
| import ( | ||||
|     "bytes" | ||||
|     "fmt" | ||||
|  | ||||
|     "github.com/yuin/goldmark" | ||||
|     "github.com/yuin/goldmark-emoji" | ||||
|     "github.com/yuin/goldmark-emoji/definition" | ||||
| ) | ||||
|  | ||||
| func main() { | ||||
|     markdown := goldmark.New( | ||||
|         goldmark.WithExtensions( | ||||
|             emoji.Emoji, | ||||
|         ), | ||||
|     ) | ||||
|     source := ` | ||||
|     Joy :joy: | ||||
|     ` | ||||
|     var buf bytes.Buffer | ||||
|     if err := markdown.Convert([]byte(source), &buf); err != nil { | ||||
|         panic(err) | ||||
|     } | ||||
|     fmt.Print(buf.String()) | ||||
| } | ||||
| ``` | ||||
|  | ||||
| See `emoji_test.go` for detailed usage. | ||||
|  | ||||
| ### Options | ||||
|  | ||||
| Options for the extension | ||||
|  | ||||
| | Option | Description | | ||||
| | ------ | ----------- | | ||||
| | `WithEmojis` | Definition of emojis. This defaults to github emoji set | | ||||
| | `WithRenderingMethod` | `Entity` : renders as HTML entities, `Twemoji` : renders as an img tag that uses [twemoji](https://github.com/twitter/twemoji), `Func` : renders using a go function | | ||||
| | `WithTwemojiTemplate` | Twemoji img tag printf template | | ||||
| | `WithRendererFunc` | renders by a go function | | ||||
|  | ||||
|  | ||||
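| A minimal usage sketch (reusing the imports from the example above and only the constructors shipped in this package) that switches rendering to twemoji images: | ||||
|  | ||||
| ```go | ||||
| markdown := goldmark.New( | ||||
|     goldmark.WithExtensions( | ||||
|         emoji.New( | ||||
|             // render :name: as a twemoji <img> tag instead of an HTML entity | ||||
|             emoji.WithRenderingMethod(emoji.Twemoji), | ||||
|         ), | ||||
|     ), | ||||
| ) | ||||
| ``` | ||||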
|  | ||||
| License | ||||
| -------------------- | ||||
| MIT | ||||
|  | ||||
| Author | ||||
| -------------------- | ||||
| Yusuke Inuzuka | ||||
|  | ||||
							
								
								
									
42  vendor/github.com/yuin/goldmark-emoji/ast/emoji.go  (generated, vendored, normal file)
							| @@ -0,0 +1,42 @@ | ||||
| // Package ast defines AST nodes that represent the emoji extension's elements. | ||||
| package ast | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
|  | ||||
| 	"github.com/yuin/goldmark-emoji/definition" | ||||
| 	gast "github.com/yuin/goldmark/ast" | ||||
| ) | ||||
|  | ||||
| // Emoji represents an inline emoji. | ||||
| type Emoji struct { | ||||
| 	gast.BaseInline | ||||
|  | ||||
| 	ShortName []byte | ||||
| 	Value     *definition.Emoji | ||||
| } | ||||
|  | ||||
| // Dump implements Node.Dump. | ||||
| func (n *Emoji) Dump(source []byte, level int) { | ||||
| 	m := map[string]string{ | ||||
| 		"ShortName": string(n.ShortName), | ||||
| 		"Value":     fmt.Sprintf("%#v", n.Value), | ||||
| 	} | ||||
| 	gast.DumpHelper(n, source, level, m, nil) | ||||
| } | ||||
|  | ||||
| // KindEmoji is a NodeKind of the emoji node. | ||||
| var KindEmoji = gast.NewNodeKind("Emoji") | ||||
|  | ||||
| // Kind implements Node.Kind. | ||||
| func (n *Emoji) Kind() gast.NodeKind { | ||||
| 	return KindEmoji | ||||
| } | ||||
|  | ||||
| // NewEmoji returns a new Emoji node. | ||||
| func NewEmoji(shortName []byte, value *definition.Emoji) *Emoji { | ||||
| 	return &Emoji{ | ||||
| 		ShortName: shortName, | ||||
| 		Value:     value, | ||||
| 	} | ||||
| } | ||||
							
								
								
									
106  vendor/github.com/yuin/goldmark-emoji/definition/definition.go  (generated, vendored, normal file)
							| @@ -0,0 +1,106 @@ | ||||
| package definition | ||||
|  | ||||
| // Emoji is a data structure that holds a single emoji. | ||||
| type Emoji struct { | ||||
| 	// Name is a name of this emoji. | ||||
| 	Name string | ||||
|  | ||||
| 	// ShortNames is a shorter representation of this emoji. | ||||
| 	ShortNames []string | ||||
|  | ||||
| 	// Unicode is a unicode representation of this emoji. | ||||
| 	Unicode []rune | ||||
| } | ||||
|  | ||||
| // NewEmoji returns a new Emoji. | ||||
| func NewEmoji(name string, unicode []rune, shortNames ...string) Emoji { | ||||
| 	if len(shortNames) == 0 { | ||||
| 		panic("Emoji must have at least 1 short name.") | ||||
| 	} | ||||
| 	if unicode == nil || len(unicode) == 0 { | ||||
| 		unicode = []rune{0xFFFD} | ||||
| 	} | ||||
| 	return Emoji{ | ||||
| 		Name:       name, | ||||
| 		ShortNames: shortNames, | ||||
| 		Unicode:    unicode, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // IsUnicode returns true if this emoji is defined in unicode, otherwise false. | ||||
| func (em *Emoji) IsUnicode() bool { | ||||
| 	return !(len(em.Unicode) == 1 && em.Unicode[0] == 0xFFFD) | ||||
| } | ||||
|  | ||||
| // Emojis is a collection of emojis. | ||||
| type Emojis interface { | ||||
| 	// Get returns (*Emoji, true) if a mapping associated with the given short name is found, otherwise (nil, false). | ||||
| 	Get(shortName string) (*Emoji, bool) | ||||
|  | ||||
| 	// Add adds new emojis to this collection. | ||||
| 	Add(Emojis) | ||||
|  | ||||
| 	// Clone clones this collection. | ||||
| 	Clone() Emojis | ||||
| } | ||||
|  | ||||
| type emojis struct { | ||||
| 	list     []Emoji | ||||
| 	m        map[string]*Emoji | ||||
| 	children []Emojis | ||||
| } | ||||
|  | ||||
| // NewEmojis returns a new Emojis. | ||||
| func NewEmojis(es ...Emoji) Emojis { | ||||
| 	m := &emojis{ | ||||
| 		list:     es, | ||||
| 		m:        map[string]*Emoji{}, | ||||
| 		children: []Emojis{}, | ||||
| 	} | ||||
| 	for i, _ := range es { | ||||
| 		emoji := &m.list[i] | ||||
| 		for _, s := range emoji.ShortNames { | ||||
| 			m.m[s] = emoji | ||||
| 		} | ||||
| 	} | ||||
| 	return m | ||||
| } | ||||
|  | ||||
| func (m *emojis) Add(emojis Emojis) { | ||||
| 	m.children = append(m.children, emojis) | ||||
| } | ||||
|  | ||||
| func (m *emojis) Clone() Emojis { | ||||
| 	es := &emojis{ | ||||
| 		list:     m.list, | ||||
| 		m:        m.m, | ||||
| 		children: make([]Emojis, len(m.children)), | ||||
| 	} | ||||
| 	copy(es.children, m.children) | ||||
| 	return es | ||||
| } | ||||
|  | ||||
| func (m *emojis) Get(shortName string) (*Emoji, bool) { | ||||
| 	v, ok := m.m[shortName] | ||||
| 	if ok { | ||||
| 		return v, ok | ||||
| 	} | ||||
|  | ||||
| 	for _, es := range m.children { | ||||
| 		v, ok := es.Get(shortName) | ||||
| 		if ok { | ||||
| 			return v, ok | ||||
| 		} | ||||
| 	} | ||||
| 	return nil, false | ||||
| } | ||||
|  | ||||
| // EmojisOption sets options for Emojis. | ||||
| type EmojisOption func(Emojis) | ||||
|  | ||||
| // WithEmojis is an EmojisOption that adds emojis to the Emojis. | ||||
| func WithEmojis(emojis ...Emoji) EmojisOption { | ||||
| 	return func(m Emojis) { | ||||
| 		m.Add(NewEmojis(emojis...)) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
1757  vendor/github.com/yuin/goldmark-emoji/definition/github.go  (generated, vendored, normal file): file diff suppressed because it is too large
							
								
								
									
										360
									
								
								vendor/github.com/yuin/goldmark-emoji/emoji.go
									
									
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										360
									
								
								vendor/github.com/yuin/goldmark-emoji/emoji.go
									
									
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,360 @@ | ||||
| // Package emoji is an extension for goldmark (http://github.com/yuin/goldmark). | ||||
| package emoji | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/yuin/goldmark" | ||||
| 	east "github.com/yuin/goldmark-emoji/ast" | ||||
| 	"github.com/yuin/goldmark-emoji/definition" | ||||
| 	"github.com/yuin/goldmark/ast" | ||||
| 	"github.com/yuin/goldmark/parser" | ||||
| 	"github.com/yuin/goldmark/renderer" | ||||
| 	"github.com/yuin/goldmark/renderer/html" | ||||
| 	"github.com/yuin/goldmark/text" | ||||
| 	"github.com/yuin/goldmark/util" | ||||
| ) | ||||
|  | ||||
| // Option interface sets options for this extension. | ||||
| type Option interface { | ||||
| 	emojiOption() | ||||
| } | ||||
|  | ||||
| // ParserConfig struct is a data structure that holds configuration of | ||||
| // the Emoji extension. | ||||
| type ParserConfig struct { | ||||
| 	Emojis definition.Emojis | ||||
| } | ||||
|  | ||||
| const optEmojis parser.OptionName = "EmojiEmojis" | ||||
|  | ||||
| // SetOption implements parser.SetOptioner | ||||
| func (c *ParserConfig) SetOption(name parser.OptionName, value interface{}) { | ||||
| 	switch name { | ||||
| 	case optEmojis: | ||||
| 		c.Emojis = value.(definition.Emojis) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // A ParserOption interface sets options for the emoji parser. | ||||
| type ParserOption interface { | ||||
| 	Option | ||||
| 	parser.Option | ||||
|  | ||||
| 	SetEmojiOption(*ParserConfig) | ||||
| } | ||||
|  | ||||
| var _ ParserOption = &withEmojis{} | ||||
|  | ||||
| type withEmojis struct { | ||||
| 	value definition.Emojis | ||||
| } | ||||
|  | ||||
| func (o *withEmojis) emojiOption() {} | ||||
|  | ||||
| func (o *withEmojis) SetParserOption(c *parser.Config) { | ||||
| 	c.Options[optEmojis] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withEmojis) SetEmojiOption(c *ParserConfig) { | ||||
| 	c.Emojis = o.value | ||||
| } | ||||
|  | ||||
| // WithEmojis is a functional option that defines a mapping of short names to unicode emojis. | ||||
| func WithEmojis(value definition.Emojis) Option { | ||||
| 	return &withEmojis{ | ||||
| 		value: value, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // RenderingMethod indicates how emojis are rendered. | ||||
| type RenderingMethod int | ||||
|  | ||||
| // RendererFunc will be used for rendering emojis. | ||||
| type RendererFunc func(w util.BufWriter, source []byte, n *east.Emoji, config *RendererConfig) | ||||
|  | ||||
| const ( | ||||
| 	// Entity renders an emoji as an html entity. | ||||
| 	Entity RenderingMethod = iota | ||||
|  | ||||
| 	// Unicode renders an emoji as unicode character. | ||||
| 	Unicode | ||||
|  | ||||
| 	// Twemoji renders an emoji as an img tag with [twemoji](https://github.com/twitter/twemoji). | ||||
| 	Twemoji | ||||
|  | ||||
| 	// Func renders an emoji using RendererFunc. | ||||
| 	Func | ||||
| ) | ||||
|  | ||||
| // RendererConfig struct holds options for the emoji renderer. | ||||
| type RendererConfig struct { | ||||
| 	html.Config | ||||
|  | ||||
| 	// Method indicates how emojis are rendered. | ||||
| 	Method RenderingMethod | ||||
|  | ||||
| 	// TwemojiTemplate is a printf template for twemoji. This value is valid only when Method is set to Twemoji. | ||||
| 	// `printf` arguments are: | ||||
| 	// | ||||
| 	//     1: name (e.g. "face with tears of joy") | ||||
| 	//     2: file name without an extension (e.g. 1f646-2642) | ||||
| 	//     3: '/' if XHTML, otherwise '' | ||||
| 	// | ||||
| 	TwemojiTemplate string | ||||
|  | ||||
| 	// RendererFunc is a RendererFunc that renders emojis. This value is valid only when Method is set to Func. | ||||
| 	RendererFunc RendererFunc | ||||
| } | ||||
|  | ||||
| // DefaultTwemojiTemplate is a default value for RendererConfig.TwemojiTemplate. | ||||
| const DefaultTwemojiTemplate = `<img class="emoji" draggable="false" alt="%[1]s" src="https://twemoji.maxcdn.com/v/latest/72x72/%[2]s.png"%[3]s>` | ||||
|  | ||||
| // SetOption implements renderer.SetOptioner. | ||||
| func (c *RendererConfig) SetOption(name renderer.OptionName, value interface{}) { | ||||
| 	switch name { | ||||
| 	case optRenderingMethod: | ||||
| 		c.Method = value.(RenderingMethod) | ||||
| 	case optTwemojiTemplate: | ||||
| 		c.TwemojiTemplate = value.(string) | ||||
| 	case optRendererFunc: | ||||
| 		c.RendererFunc = value.(RendererFunc) | ||||
| 	default: | ||||
| 		c.Config.SetOption(name, value) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // A RendererOption interface sets options for the emoji renderer. | ||||
| type RendererOption interface { | ||||
| 	Option | ||||
| 	renderer.Option | ||||
|  | ||||
| 	SetEmojiOption(*RendererConfig) | ||||
| } | ||||
|  | ||||
| var _ RendererOption = &withRenderingMethod{} | ||||
|  | ||||
| type withRenderingMethod struct { | ||||
| 	value RenderingMethod | ||||
| } | ||||
|  | ||||
| func (o *withRenderingMethod) emojiOption() { | ||||
| } | ||||
|  | ||||
| // SetConfig implements renderer.Option#SetConfig. | ||||
| func (o *withRenderingMethod) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optRenderingMethod] = o.value | ||||
| } | ||||
|  | ||||
| // SetEmojiOption implements RendererOption#SetEmojiOption | ||||
| func (o *withRenderingMethod) SetEmojiOption(c *RendererConfig) { | ||||
| 	c.Method = o.value | ||||
| } | ||||
|  | ||||
| const optRenderingMethod renderer.OptionName = "EmojiRenderingMethod" | ||||
|  | ||||
| // WithRenderingMethod is a functional option that indicates how emojis are rendered. | ||||
| func WithRenderingMethod(a RenderingMethod) Option { | ||||
| 	return &withRenderingMethod{a} | ||||
| } | ||||
|  | ||||
| type withTwemojiTemplate struct { | ||||
| 	value string | ||||
| } | ||||
|  | ||||
| func (o *withTwemojiTemplate) emojiOption() { | ||||
| } | ||||
|  | ||||
| // SetConfig implements renderer.Option#SetConfig. | ||||
| func (o *withTwemojiTemplate) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optTwemojiTemplate] = o.value | ||||
| } | ||||
|  | ||||
| // SetEmojiOption implements RendererOption#SetEmojiOption | ||||
| func (o *withTwemojiTemplate) SetEmojiOption(c *RendererConfig) { | ||||
| 	c.TwemojiTemplate = o.value | ||||
| } | ||||
|  | ||||
| const optTwemojiTemplate renderer.OptionName = "EmojiTwemojiTemplate" | ||||
|  | ||||
| // WithTwemojiTemplate is a functional option that changes a twemoji img tag. | ||||
| func WithTwemojiTemplate(s string) Option { | ||||
| 	return &withTwemojiTemplate{s} | ||||
| } | ||||
|  | ||||
| var _ RendererOption = &withRendererFunc{} | ||||
|  | ||||
| type withRendererFunc struct { | ||||
| 	value RendererFunc | ||||
| } | ||||
|  | ||||
| func (o *withRendererFunc) emojiOption() { | ||||
| } | ||||
|  | ||||
| // SetConfig implements renderer.Option#SetConfig. | ||||
| func (o *withRendererFunc) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optRendererFunc] = o.value | ||||
| } | ||||
|  | ||||
| // SetEmojiOption implements RendererOption#SetEmojiOption | ||||
| func (o *withRendererFunc) SetEmojiOption(c *RendererConfig) { | ||||
| 	c.RendererFunc = o.value | ||||
| } | ||||
|  | ||||
| const optRendererFunc renderer.OptionName = "EmojiRendererFunc" | ||||
|  | ||||
| // WithRendererFunc is a functional option that changes a renderer func. | ||||
| func WithRendererFunc(f RendererFunc) Option { | ||||
| 	return &withRendererFunc{f} | ||||
| } | ||||
|  | ||||
| type emojiParser struct { | ||||
| 	ParserConfig | ||||
| } | ||||
|  | ||||
| // NewParser returns a new parser.InlineParser that can parse emoji expressions. | ||||
| func NewParser(opts ...ParserOption) parser.InlineParser { | ||||
| 	p := &emojiParser{ | ||||
| 		ParserConfig: ParserConfig{ | ||||
| 			Emojis: definition.Github(), | ||||
| 		}, | ||||
| 	} | ||||
| 	for _, o := range opts { | ||||
| 		o.SetEmojiOption(&p.ParserConfig) | ||||
| 	} | ||||
| 	return p | ||||
| } | ||||
|  | ||||
| func (s *emojiParser) Trigger() []byte { | ||||
| 	return []byte{':'} | ||||
| } | ||||
|  | ||||
| func (s *emojiParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node { | ||||
| 	line, _ := block.PeekLine() | ||||
| 	if len(line) < 1 { | ||||
| 		return nil | ||||
| 	} | ||||
| 	i := 1 | ||||
| 	for ; i < len(line); i++ { | ||||
| 		c := line[i] | ||||
| 		if !(util.IsAlphaNumeric(c) || c == '_' || c == '-' || c == '+') { | ||||
| 			break | ||||
| 		} | ||||
| 	} | ||||
| 	if i >= len(line) || line[i] != ':' { | ||||
| 		return nil | ||||
| 	} | ||||
| 	block.Advance(i + 1) | ||||
| 	shortName := line[1:i] | ||||
| 	emoji, ok := s.Emojis.Get(util.BytesToReadOnlyString(shortName)) | ||||
| 	if !ok { | ||||
| 		return nil | ||||
| 	} | ||||
| 	return east.NewEmoji(shortName, emoji) | ||||
| } | ||||
|  | ||||
| type emojiHTMLRenderer struct { | ||||
| 	RendererConfig | ||||
| } | ||||
|  | ||||
| // NewHTMLRenderer returns a new HTMLRenderer. | ||||
| func NewHTMLRenderer(opts ...RendererOption) renderer.NodeRenderer { | ||||
| 	r := &emojiHTMLRenderer{ | ||||
| 		RendererConfig: RendererConfig{ | ||||
| 			Config:          html.NewConfig(), | ||||
| 			Method:          Entity, | ||||
| 			TwemojiTemplate: DefaultTwemojiTemplate, | ||||
| 			RendererFunc:    nil, | ||||
| 		}, | ||||
| 	} | ||||
| 	for _, opt := range opts { | ||||
| 		opt.SetEmojiOption(&r.RendererConfig) | ||||
| 	} | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| // RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs. | ||||
| func (r *emojiHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) { | ||||
| 	reg.Register(east.KindEmoji, r.renderEmoji) | ||||
| } | ||||
|  | ||||
| const slash = " /" | ||||
| const empty = "" | ||||
|  | ||||
| func (r *emojiHTMLRenderer) renderEmoji(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) { | ||||
| 	if !entering { | ||||
| 		return ast.WalkContinue, nil | ||||
| 	} | ||||
| 	node := n.(*east.Emoji) | ||||
| 	if !node.Value.IsUnicode() && r.Method != Func { | ||||
| 		fmt.Fprintf(w, `<span title="%s">:%s:</span>`, util.EscapeHTML(util.StringToReadOnlyBytes(node.Value.Name)), node.ShortName) | ||||
| 		return ast.WalkContinue, nil | ||||
| 	} | ||||
|  | ||||
| 	switch r.Method { | ||||
| 	case Entity: | ||||
| 		for _, r := range node.Value.Unicode { | ||||
| 			if r == 0x200D { | ||||
| 				_, _ = w.WriteString("‍") | ||||
| 				continue | ||||
| 			} | ||||
| 			fmt.Fprintf(w, "&#x%x;", r) | ||||
| 		} | ||||
| 	case Unicode: | ||||
| 		fmt.Fprintf(w, "%s", string(node.Value.Unicode)) | ||||
| 	case Twemoji: | ||||
| 		s := slash | ||||
| 		if !r.XHTML { | ||||
| 			s = empty | ||||
| 		} | ||||
| 		values := []string{} | ||||
| 		for _, r := range node.Value.Unicode { | ||||
| 			values = append(values, fmt.Sprintf("%x", r)) | ||||
| 		} | ||||
| 		fmt.Fprintf(w, r.TwemojiTemplate, util.EscapeHTML(util.StringToReadOnlyBytes(node.Value.Name)), strings.Join(values, "-"), s) | ||||
| 	case Func: | ||||
| 		r.RendererFunc(w, source, node, &r.RendererConfig) | ||||
| 	} | ||||
| 	return ast.WalkContinue, nil | ||||
| } | ||||
|  | ||||
| type emoji struct { | ||||
| 	options []Option | ||||
| } | ||||
|  | ||||
| // Emoji is a goldmark.Extender implementation. | ||||
| var Emoji = &emoji{ | ||||
| 	options: []Option{}, | ||||
| } | ||||
|  | ||||
| // New returns a new extension with given options. | ||||
| func New(opts ...Option) goldmark.Extender { | ||||
| 	return &emoji{ | ||||
| 		options: opts, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Extend implements goldmark.Extender. | ||||
| func (e *emoji) Extend(m goldmark.Markdown) { | ||||
| 	pOpts := []ParserOption{} | ||||
| 	rOpts := []RendererOption{} | ||||
| 	for _, o := range e.options { | ||||
| 		if po, ok := o.(ParserOption); ok { | ||||
| 			pOpts = append(pOpts, po) | ||||
| 			continue | ||||
| 		} | ||||
| 		if ro, ok := o.(RendererOption); ok { | ||||
| 			rOpts = append(rOpts, ro) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	m.Renderer().AddOptions(renderer.WithNodeRenderers( | ||||
| 		util.Prioritized(NewHTMLRenderer(rOpts...), 200), | ||||
| 	)) | ||||
|  | ||||
| 	m.Parser().AddOptions(parser.WithInlineParsers( | ||||
| 		util.Prioritized(NewParser(pOpts...), 999), | ||||
| 	)) | ||||
|  | ||||
| } | ||||
							
								
								
									
5  vendor/github.com/yuin/goldmark-emoji/go.mod  (generated, vendored, normal file)
							| @@ -0,0 +1,5 @@ | ||||
| module github.com/yuin/goldmark-emoji | ||||
|  | ||||
| go 1.15 | ||||
|  | ||||
| require github.com/yuin/goldmark v1.2.1 | ||||
							
								
								
									
2  vendor/github.com/yuin/goldmark-emoji/go.sum  (generated, vendored, normal file)
							| @@ -0,0 +1,2 @@ | ||||
| github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM= | ||||
| github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | ||||
							
								
								
									
88  vendor/github.com/yuin/goldmark/README.md  (generated, vendored)
							| @@ -1,7 +1,7 @@ | ||||
| goldmark | ||||
| ========================================== | ||||
|  | ||||
| [](http://godoc.org/github.com/yuin/goldmark) | ||||
| [](https://pkg.go.dev/github.com/yuin/goldmark) | ||||
| [](https://github.com/yuin/goldmark/actions?query=workflow:test) | ||||
| [](https://coveralls.io/github/yuin/goldmark) | ||||
| [](https://goreportcard.com/report/github.com/yuin/goldmark) | ||||
| @@ -173,6 +173,7 @@ Parser and Renderer options | ||||
|     - This extension enables Table, Strikethrough, Linkify and TaskList. | ||||
|     - This extension does not filter tags defined in [6.11: Disallowed Raw HTML (extension)](https://github.github.com/gfm/#disallowed-raw-html-extension-). | ||||
|     If you need to filter HTML tags, see [Security](#security). | ||||
|     - If you need to parse github emojis, you can use [goldmark-emoji](https://github.com/yuin/goldmark-emoji) extension. | ||||
| - `extension.DefinitionList` | ||||
|     - [PHP Markdown Extra: Definition lists](https://michelf.ca/projects/php-markdown/extra/#def-list) | ||||
| - `extension.Footnote` | ||||
| @@ -286,6 +287,89 @@ markdown := goldmark.New( | ||||
| ) | ||||
| ``` | ||||
|  | ||||
| ### Footnotes extension | ||||
|  | ||||
| The Footnote extension implements [PHP Markdown Extra: Footnotes](https://michelf.ca/projects/php-markdown/extra/#footnotes). | ||||
|  | ||||
| This extension has some options: | ||||
|  | ||||
| | Functional option | Type | Description | | ||||
| | ----------------- | ---- | ----------- | | ||||
| | `extension.WithFootnoteIDPrefix` | `[]byte` |  a prefix for the id attributes.| | ||||
| | `extension.WithFootnoteIDPrefixFunction` | `func(gast.Node) []byte` |  a function that determines the id attribute for given Node.| | ||||
| | `extension.WithFootnoteLinkTitle` | `[]byte` |  an optional title attribute for footnote links.| | ||||
| | `extension.WithFootnoteBacklinkTitle` | `[]byte` |  an optional title attribute for footnote backlinks. | | ||||
| | `extension.WithFootnoteLinkClass` | `[]byte` |  a class for footnote links. This defaults to `footnote-ref`. | | ||||
| | `extension.WithFootnoteBacklinkClass` | `[]byte` |  a class for footnote backlinks. This defaults to `footnote-backref`. | | ||||
| | `extension.WithFootnoteBacklinkHTML` | `[]byte` |  a class for footnote backlinks. This defaults to `↩︎`. | | ||||
|  | ||||
| Some options can have special substitutions. Occurrences of “^^” in the string will be replaced by the corresponding footnote number in the HTML output. Occurrences of “%%” will be replaced by a number for the reference (footnotes can have multiple references). | ||||
|  | ||||
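| For illustration (a sketch using only option names from the table above), a backlink template of `↩ ^^:%%` should come out roughly as `↩ 2:1` for the first reference to the second footnote: | ||||
|  | ||||
| ```go | ||||
| markdown := goldmark.New( | ||||
|     goldmark.WithExtensions( | ||||
|         NewFootnote( | ||||
|             // "^^" expands to the footnote number, "%%" to the reference number | ||||
|             WithFootnoteBacklinkHTML([]byte("↩ ^^:%%")), | ||||
|         ), | ||||
|     ), | ||||
| ) | ||||
| ``` | ||||
|  | ||||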
| `extension.WithFootnoteIDPrefix` and `extension.WithFootnoteIDPrefixFunction` are useful if you have multiple Markdown documents displayed inside one HTML document, to keep footnote ids from clashing with each other. | ||||
|  | ||||
| `extension.WithFootnoteIDPrefix` sets a fixed id prefix, so you may write code like the following: | ||||
|  | ||||
| ```go | ||||
| for _, path := range files { | ||||
|     source := readAll(path) | ||||
|     prefix := getPrefix(path) | ||||
|  | ||||
|     markdown := goldmark.New( | ||||
|         goldmark.WithExtensions( | ||||
|             NewFootnote( | ||||
|                 WithFootnoteIDPrefix([]byte(path)), | ||||
|             ), | ||||
|         ), | ||||
|     ) | ||||
|     var b bytes.Buffer | ||||
|     err := markdown.Convert(source, &b) | ||||
|     if err != nil { | ||||
|         t.Error(err.Error()) | ||||
|     } | ||||
| } | ||||
| ``` | ||||
|  | ||||
| `extension.WithFootnoteIDPrefixFunction` determines an id prefix by calling the given function, so you may write code like the following: | ||||
|  | ||||
| ```go | ||||
| markdown := goldmark.New( | ||||
|     goldmark.WithExtensions( | ||||
|         NewFootnote( | ||||
|                 WithFootnoteIDPrefixFunction(func(n gast.Node) []byte { | ||||
|                     v, ok := n.OwnerDocument().Meta()["footnote-prefix"] | ||||
|                     if ok { | ||||
|                         return util.StringToReadOnlyBytes(v.(string)) | ||||
|                     } | ||||
|                     return nil | ||||
|                 }), | ||||
|         ), | ||||
|     ), | ||||
| ) | ||||
|  | ||||
| for _, path := range files { | ||||
|     source := readAll(path) | ||||
|     var b bytes.Buffer | ||||
|  | ||||
|     doc := markdown.Parser().Parse(text.NewReader(source)) | ||||
|     doc.Meta()["footnote-prefix"] = getPrefix(path) | ||||
|     err := markdown.Renderer().Render(&b, source, doc) | ||||
| } | ||||
| ``` | ||||
|  | ||||
| You can use [goldmark-meta](https://github.com/yuin/goldmark-meta) to define an id prefix in the markdown document: | ||||
|  | ||||
|  | ||||
| ```markdown | ||||
| --- | ||||
| title: document title | ||||
| slug: article1 | ||||
| footnote-prefix: article1 | ||||
| --- | ||||
|  | ||||
| # My article | ||||
|  | ||||
| ``` | ||||
|   | ||||
| Security | ||||
| -------------------- | ||||
| By default, goldmark does not render raw HTML or potentially-dangerous URLs. | ||||
| @@ -336,6 +420,8 @@ Extensions | ||||
|   extension for the goldmark Markdown parser. | ||||
| - [goldmark-highlighting](https://github.com/yuin/goldmark-highlighting): A syntax-highlighting extension | ||||
|   for the goldmark markdown parser. | ||||
| - [goldmark-emoji](https://github.com/yuin/goldmark-emoji): An emoji | ||||
|   extension for the goldmark Markdown parser. | ||||
| - [goldmark-mathjax](https://github.com/litao91/goldmark-mathjax): Mathjax support for the goldmark markdown parser | ||||
|  | ||||
| goldmark internal(for extension developers) | ||||
|   | ||||
							
								
								
									
28  vendor/github.com/yuin/goldmark/ast/ast.go  (generated, vendored)
							| @@ -45,11 +45,6 @@ type Attribute struct { | ||||
| 	Value interface{} | ||||
| } | ||||
|  | ||||
| var attrNameIDS = []byte("#") | ||||
| var attrNameID = []byte("id") | ||||
| var attrNameClassS = []byte(".") | ||||
| var attrNameClass = []byte("class") | ||||
|  | ||||
| // A Node interface defines basic AST node functionalities. | ||||
| type Node interface { | ||||
| 	// Type returns a type of this node. | ||||
| @@ -116,6 +111,11 @@ type Node interface { | ||||
| 	// tail of the children. | ||||
| 	InsertAfter(self, v1, insertee Node) | ||||
|  | ||||
| 	// OwnerDocument returns this node's owner document. | ||||
| 	// If this node is not a child of the Document node, OwnerDocument | ||||
| 	// returns nil. | ||||
| 	OwnerDocument() *Document | ||||
|  | ||||
| 	// Dump dumps an AST tree structure to stdout. | ||||
| 	// This function is aimed purely at debugging. | ||||
| 	// level is an indent level. Implementers should indent information with | ||||
| @@ -169,7 +169,7 @@ type Node interface { | ||||
| 	RemoveAttributes() | ||||
| } | ||||
|  | ||||
| // A BaseNode struct implements the Node interface. | ||||
| // A BaseNode struct implements the Node interface partially. | ||||
| type BaseNode struct { | ||||
| 	firstChild Node | ||||
| 	lastChild  Node | ||||
| @@ -358,6 +358,22 @@ func (n *BaseNode) InsertBefore(self, v1, insertee Node) { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // OwnerDocument implements Node.OwnerDocument | ||||
| func (n *BaseNode) OwnerDocument() *Document { | ||||
| 	d := n.Parent() | ||||
| 	for { | ||||
| 		p := d.Parent() | ||||
| 		if p == nil { | ||||
| 			if v, ok := d.(*Document); ok { | ||||
| 				return v | ||||
| 			} | ||||
| 			break | ||||
| 		} | ||||
| 		d = p | ||||
| 	} | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // Text implements Node.Text. | ||||
| func (n *BaseNode) Text(source []byte) []byte { | ||||
| 	var buf bytes.Buffer | ||||
|   | ||||
							
								
								
									
23  vendor/github.com/yuin/goldmark/ast/block.go  (generated, vendored)
							| @@ -7,7 +7,7 @@ import ( | ||||
| 	textm "github.com/yuin/goldmark/text" | ||||
| ) | ||||
|  | ||||
| // A BaseBlock struct implements the Node interface. | ||||
| // A BaseBlock struct implements the Node interface partially. | ||||
| type BaseBlock struct { | ||||
| 	BaseNode | ||||
| 	blankPreviousLines bool | ||||
| @@ -50,6 +50,8 @@ func (b *BaseBlock) SetLines(v *textm.Segments) { | ||||
| // A Document struct is a root node of Markdown text. | ||||
| type Document struct { | ||||
| 	BaseBlock | ||||
|  | ||||
| 	meta map[string]interface{} | ||||
| } | ||||
|  | ||||
| // KindDocument is a NodeKind of the Document node. | ||||
| @@ -70,10 +72,29 @@ func (n *Document) Kind() NodeKind { | ||||
| 	return KindDocument | ||||
| } | ||||
|  | ||||
| // OwnerDocument implements Node.OwnerDocument | ||||
| func (n *Document) OwnerDocument() *Document { | ||||
| 	return n | ||||
| } | ||||
|  | ||||
| // Meta returns metadata of this document. | ||||
| func (n *Document) Meta() map[string]interface{} { | ||||
| 	if n.meta == nil { | ||||
| 		n.meta = map[string]interface{}{} | ||||
| 	} | ||||
| 	return n.meta | ||||
| } | ||||
|  | ||||
| // SetMeta sets given metadata to this document. | ||||
| func (n *Document) SetMeta(meta map[string]interface{}) { | ||||
| 	n.meta = meta | ||||
| } | ||||
|  | ||||
| // NewDocument returns a new Document node. | ||||
| func NewDocument() *Document { | ||||
| 	return &Document{ | ||||
| 		BaseBlock: BaseBlock{}, | ||||
| 		meta:      nil, | ||||
| 	} | ||||
| } | ||||
|  | ||||
|   | ||||
							
								
								
									
2  vendor/github.com/yuin/goldmark/ast/inline.go  (generated, vendored)
							| @@ -8,7 +8,7 @@ import ( | ||||
| 	"github.com/yuin/goldmark/util" | ||||
| ) | ||||
|  | ||||
| // A BaseInline struct implements the Node interface. | ||||
| // A BaseInline struct implements the Node interface partially. | ||||
| type BaseInline struct { | ||||
| 	BaseNode | ||||
| } | ||||
|   | ||||
							
								
								
									
27  vendor/github.com/yuin/goldmark/extension/ast/footnote.go  (generated, vendored)
							| @@ -2,6 +2,7 @@ package ast | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
|  | ||||
| 	gast "github.com/yuin/goldmark/ast" | ||||
| ) | ||||
|  | ||||
| @@ -10,12 +11,14 @@ import ( | ||||
| type FootnoteLink struct { | ||||
| 	gast.BaseInline | ||||
| 	Index    int | ||||
| 	RefCount int | ||||
| } | ||||
|  | ||||
| // Dump implements Node.Dump. | ||||
| func (n *FootnoteLink) Dump(source []byte, level int) { | ||||
| 	m := map[string]string{} | ||||
| 	m["Index"] = fmt.Sprintf("%v", n.Index) | ||||
| 	m["RefCount"] = fmt.Sprintf("%v", n.RefCount) | ||||
| 	gast.DumpHelper(n, source, level, m, nil) | ||||
| } | ||||
|  | ||||
| @@ -31,35 +34,39 @@ func (n *FootnoteLink) Kind() gast.NodeKind { | ||||
| func NewFootnoteLink(index int) *FootnoteLink { | ||||
| 	return &FootnoteLink{ | ||||
| 		Index:    index, | ||||
| 		RefCount: 0, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // A FootnoteBackLink struct represents a link to a footnote of Markdown | ||||
| // A FootnoteBacklink struct represents a link to a footnote of Markdown | ||||
| // (PHP Markdown Extra) text. | ||||
| type FootnoteBackLink struct { | ||||
| type FootnoteBacklink struct { | ||||
| 	gast.BaseInline | ||||
| 	Index    int | ||||
| 	RefCount int | ||||
| } | ||||
|  | ||||
| // Dump implements Node.Dump. | ||||
| func (n *FootnoteBackLink) Dump(source []byte, level int) { | ||||
| func (n *FootnoteBacklink) Dump(source []byte, level int) { | ||||
| 	m := map[string]string{} | ||||
| 	m["Index"] = fmt.Sprintf("%v", n.Index) | ||||
| 	m["RefCount"] = fmt.Sprintf("%v", n.RefCount) | ||||
| 	gast.DumpHelper(n, source, level, m, nil) | ||||
| } | ||||
|  | ||||
| // KindFootnoteBackLink is a NodeKind of the FootnoteBackLink node. | ||||
| var KindFootnoteBackLink = gast.NewNodeKind("FootnoteBackLink") | ||||
| // KindFootnoteBacklink is a NodeKind of the FootnoteBacklink node. | ||||
| var KindFootnoteBacklink = gast.NewNodeKind("FootnoteBacklink") | ||||
|  | ||||
| // Kind implements Node.Kind. | ||||
| func (n *FootnoteBackLink) Kind() gast.NodeKind { | ||||
| 	return KindFootnoteBackLink | ||||
| func (n *FootnoteBacklink) Kind() gast.NodeKind { | ||||
| 	return KindFootnoteBacklink | ||||
| } | ||||
|  | ||||
| // NewFootnoteBackLink returns a new FootnoteBackLink node. | ||||
| func NewFootnoteBackLink(index int) *FootnoteBackLink { | ||||
| 	return &FootnoteBackLink{ | ||||
| // NewFootnoteBacklink returns a new FootnoteBacklink node. | ||||
| func NewFootnoteBacklink(index int) *FootnoteBacklink { | ||||
| 	return &FootnoteBacklink{ | ||||
| 		Index:    index, | ||||
| 		RefCount: 0, | ||||
| 	} | ||||
| } | ||||
|  | ||||
|   | ||||
							
								
								
									
382  vendor/github.com/yuin/goldmark/extension/footnote.go  (generated, vendored)
							| @@ -2,6 +2,8 @@ package extension | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"strconv" | ||||
|  | ||||
| 	"github.com/yuin/goldmark" | ||||
| 	gast "github.com/yuin/goldmark/ast" | ||||
| 	"github.com/yuin/goldmark/extension/ast" | ||||
| @@ -10,10 +12,10 @@ import ( | ||||
| 	"github.com/yuin/goldmark/renderer/html" | ||||
| 	"github.com/yuin/goldmark/text" | ||||
| 	"github.com/yuin/goldmark/util" | ||||
| 	"strconv" | ||||
| ) | ||||
|  | ||||
| var footnoteListKey = parser.NewContextKey() | ||||
| var footnoteLinkListKey = parser.NewContextKey() | ||||
|  | ||||
| type footnoteBlockParser struct { | ||||
| } | ||||
| @@ -164,7 +166,20 @@ func (s *footnoteParser) Parse(parent gast.Node, block text.Reader, pc parser.Co | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	return ast.NewFootnoteLink(index) | ||||
| 	fnlink := ast.NewFootnoteLink(index) | ||||
| 	var fnlist []*ast.FootnoteLink | ||||
| 	if tmp := pc.Get(footnoteLinkListKey); tmp != nil { | ||||
| 		fnlist = tmp.([]*ast.FootnoteLink) | ||||
| 	} else { | ||||
| 		fnlist = []*ast.FootnoteLink{} | ||||
| 		pc.Set(footnoteLinkListKey, fnlist) | ||||
| 	} | ||||
| 	pc.Set(footnoteLinkListKey, append(fnlist, fnlink)) | ||||
| 	if line[0] == '!' { | ||||
| 		parent.AppendChild(parent, gast.NewTextSegment(text.NewSegment(segment.Start, segment.Start+1))) | ||||
| 	} | ||||
|  | ||||
| 	return fnlink | ||||
| } | ||||
|  | ||||
| type footnoteASTTransformer struct { | ||||
| @@ -180,23 +195,46 @@ func NewFootnoteASTTransformer() parser.ASTTransformer { | ||||
|  | ||||
| func (a *footnoteASTTransformer) Transform(node *gast.Document, reader text.Reader, pc parser.Context) { | ||||
| 	var list *ast.FootnoteList | ||||
| 	if tlist := pc.Get(footnoteListKey); tlist != nil { | ||||
| 		list = tlist.(*ast.FootnoteList) | ||||
| 	} else { | ||||
| 	var fnlist []*ast.FootnoteLink | ||||
| 	if tmp := pc.Get(footnoteListKey); tmp != nil { | ||||
| 		list = tmp.(*ast.FootnoteList) | ||||
| 	} | ||||
| 	if tmp := pc.Get(footnoteLinkListKey); tmp != nil { | ||||
| 		fnlist = tmp.([]*ast.FootnoteLink) | ||||
| 	} | ||||
|  | ||||
| 	pc.Set(footnoteListKey, nil) | ||||
| 	pc.Set(footnoteLinkListKey, nil) | ||||
|  | ||||
| 	if list == nil { | ||||
| 		return | ||||
| 	} | ||||
| 	pc.Set(footnoteListKey, nil) | ||||
|  | ||||
| 	counter := map[int]int{} | ||||
| 	if fnlist != nil { | ||||
| 		for _, fnlink := range fnlist { | ||||
| 			if fnlink.Index >= 0 { | ||||
| 				counter[fnlink.Index]++ | ||||
| 			} | ||||
| 		} | ||||
| 		for _, fnlink := range fnlist { | ||||
| 			fnlink.RefCount = counter[fnlink.Index] | ||||
| 		} | ||||
| 	} | ||||
| 	for footnote := list.FirstChild(); footnote != nil; { | ||||
| 		var container gast.Node = footnote | ||||
| 		next := footnote.NextSibling() | ||||
| 		if fc := container.LastChild(); fc != nil && gast.IsParagraph(fc) { | ||||
| 			container = fc | ||||
| 		} | ||||
| 		index := footnote.(*ast.Footnote).Index | ||||
| 		fn := footnote.(*ast.Footnote) | ||||
| 		index := fn.Index | ||||
| 		if index < 0 { | ||||
| 			list.RemoveChild(list, footnote) | ||||
| 		} else { | ||||
| 			container.AppendChild(container, ast.NewFootnoteBackLink(index)) | ||||
| 			backLink := ast.NewFootnoteBacklink(index) | ||||
| 			backLink.RefCount = counter[index] | ||||
| 			container.AppendChild(container, backLink) | ||||
| 		} | ||||
| 		footnote = next | ||||
| 	} | ||||
| @@ -214,19 +252,250 @@ func (a *footnoteASTTransformer) Transform(node *gast.Document, reader text.Read | ||||
| 	node.AppendChild(node, list) | ||||
| } | ||||
|  | ||||
| // FootnoteConfig holds configuration values for the footnote extension. | ||||
| // | ||||
| // Link* and Backlink* configurations have some variables: | ||||
| // Occurrences of “^^” in the string will be replaced by the | ||||
| // corresponding footnote number in the HTML output. | ||||
| // Occurrences of “%%” will be replaced by a number for the | ||||
| // reference (footnotes can have multiple references). | ||||
| type FootnoteConfig struct { | ||||
| 	html.Config | ||||
|  | ||||
| 	// IDPrefix is a prefix for the id attributes generated by footnotes. | ||||
| 	IDPrefix []byte | ||||
|  | ||||
| 	// IDPrefixFunction is a function that determines the id attribute for a given Node. | ||||
| 	IDPrefixFunction func(gast.Node) []byte | ||||
|  | ||||
| 	// LinkTitle is an optional title attribute for footnote links. | ||||
| 	LinkTitle []byte | ||||
|  | ||||
| 	// BacklinkTitle is an optional title attribute for footnote backlinks. | ||||
| 	BacklinkTitle []byte | ||||
|  | ||||
| 	// LinkClass is a class for footnote links. | ||||
| 	LinkClass []byte | ||||
|  | ||||
| 	// BacklinkClass is a class for footnote backlinks. | ||||
| 	BacklinkClass []byte | ||||
|  | ||||
| 	// BacklinkHTML is an HTML content for footnote backlinks. | ||||
| 	BacklinkHTML []byte | ||||
| } | ||||
|  | ||||
| // FootnoteOption interface is a functional option interface for the extension. | ||||
| type FootnoteOption interface { | ||||
| 	renderer.Option | ||||
| 	// SetFootnoteOption sets given option to the extension. | ||||
| 	SetFootnoteOption(*FootnoteConfig) | ||||
| } | ||||
|  | ||||
| // NewFootnoteConfig returns a new Config with defaults. | ||||
| func NewFootnoteConfig() FootnoteConfig { | ||||
| 	return FootnoteConfig{ | ||||
| 		Config:        html.NewConfig(), | ||||
| 		LinkTitle:     []byte(""), | ||||
| 		BacklinkTitle: []byte(""), | ||||
| 		LinkClass:     []byte("footnote-ref"), | ||||
| 		BacklinkClass: []byte("footnote-backref"), | ||||
| 		BacklinkHTML:  []byte("↩︎"), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // SetOption implements renderer.SetOptioner. | ||||
| func (c *FootnoteConfig) SetOption(name renderer.OptionName, value interface{}) { | ||||
| 	switch name { | ||||
| 	case optFootnoteIDPrefixFunction: | ||||
| 		c.IDPrefixFunction = value.(func(gast.Node) []byte) | ||||
| 	case optFootnoteIDPrefix: | ||||
| 		c.IDPrefix = value.([]byte) | ||||
| 	case optFootnoteLinkTitle: | ||||
| 		c.LinkTitle = value.([]byte) | ||||
| 	case optFootnoteBacklinkTitle: | ||||
| 		c.BacklinkTitle = value.([]byte) | ||||
| 	case optFootnoteLinkClass: | ||||
| 		c.LinkClass = value.([]byte) | ||||
| 	case optFootnoteBacklinkClass: | ||||
| 		c.BacklinkClass = value.([]byte) | ||||
| 	case optFootnoteBacklinkHTML: | ||||
| 		c.BacklinkHTML = value.([]byte) | ||||
| 	default: | ||||
| 		c.Config.SetOption(name, value) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type withFootnoteHTMLOptions struct { | ||||
| 	value []html.Option | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteHTMLOptions) SetConfig(c *renderer.Config) { | ||||
| 	if o.value != nil { | ||||
| 		for _, v := range o.value { | ||||
| 			v.(renderer.Option).SetConfig(c) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteHTMLOptions) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	if o.value != nil { | ||||
| 		for _, v := range o.value { | ||||
| 			v.SetHTMLOption(&c.Config) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // WithFootnoteHTMLOptions is a functional option that wraps goldmark HTMLRenderer options. | ||||
| func WithFootnoteHTMLOptions(opts ...html.Option) FootnoteOption { | ||||
| 	return &withFootnoteHTMLOptions{opts} | ||||
| } | ||||
|  | ||||
| const optFootnoteIDPrefix renderer.OptionName = "FootnoteIDPrefix" | ||||
|  | ||||
| type withFootnoteIDPrefix struct { | ||||
| 	value []byte | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteIDPrefix) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optFootnoteIDPrefix] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteIDPrefix) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	c.IDPrefix = o.value | ||||
| } | ||||
|  | ||||
| // WithFootnoteIDPrefix is a functional option that is a prefix for the id attributes generated by footnotes. | ||||
| func WithFootnoteIDPrefix(a []byte) FootnoteOption { | ||||
| 	return &withFootnoteIDPrefix{a} | ||||
| } | ||||
|  | ||||
| const optFootnoteIDPrefixFunction renderer.OptionName = "FootnoteIDPrefixFunction" | ||||
|  | ||||
| type withFootnoteIDPrefixFunction struct { | ||||
| 	value func(gast.Node) []byte | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteIDPrefixFunction) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optFootnoteIDPrefixFunction] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteIDPrefixFunction) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	c.IDPrefixFunction = o.value | ||||
| } | ||||
|  | ||||
| // WithFootnoteIDPrefixFunction is a functional option that sets a function to determine the prefix for the id attributes generated by footnotes. | ||||
| func WithFootnoteIDPrefixFunction(a func(gast.Node) []byte) FootnoteOption { | ||||
| 	return &withFootnoteIDPrefixFunction{a} | ||||
| } | ||||
|  | ||||
| const optFootnoteLinkTitle renderer.OptionName = "FootnoteLinkTitle" | ||||
|  | ||||
| type withFootnoteLinkTitle struct { | ||||
| 	value []byte | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteLinkTitle) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optFootnoteLinkTitle] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteLinkTitle) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	c.LinkTitle = o.value | ||||
| } | ||||
|  | ||||
| // WithFootnoteLinkTitle is a functional option that is an optional title attribute for footnote links. | ||||
| func WithFootnoteLinkTitle(a []byte) FootnoteOption { | ||||
| 	return &withFootnoteLinkTitle{a} | ||||
| } | ||||
|  | ||||
| const optFootnoteBacklinkTitle renderer.OptionName = "FootnoteBacklinkTitle" | ||||
|  | ||||
| type withFootnoteBacklinkTitle struct { | ||||
| 	value []byte | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteBacklinkTitle) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optFootnoteBacklinkTitle] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteBacklinkTitle) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	c.BacklinkTitle = o.value | ||||
| } | ||||
|  | ||||
| // WithFootnoteBacklinkTitle is a functional option that is an optional title attribute for footnote backlinks. | ||||
| func WithFootnoteBacklinkTitle(a []byte) FootnoteOption { | ||||
| 	return &withFootnoteBacklinkTitle{a} | ||||
| } | ||||
|  | ||||
| const optFootnoteLinkClass renderer.OptionName = "FootnoteLinkClass" | ||||
|  | ||||
| type withFootnoteLinkClass struct { | ||||
| 	value []byte | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteLinkClass) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optFootnoteLinkClass] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteLinkClass) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	c.LinkClass = o.value | ||||
| } | ||||
|  | ||||
| // WithFootnoteLinkClass is a functional option that is a class for footnote links. | ||||
| func WithFootnoteLinkClass(a []byte) FootnoteOption { | ||||
| 	return &withFootnoteLinkClass{a} | ||||
| } | ||||
|  | ||||
| const optFootnoteBacklinkClass renderer.OptionName = "FootnoteBacklinkClass" | ||||
|  | ||||
| type withFootnoteBacklinkClass struct { | ||||
| 	value []byte | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteBacklinkClass) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optFootnoteBacklinkClass] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteBacklinkClass) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	c.BacklinkClass = o.value | ||||
| } | ||||
|  | ||||
| // WithFootnoteBacklinkClass is a functional option that is a class for footnote backlinks. | ||||
| func WithFootnoteBacklinkClass(a []byte) FootnoteOption { | ||||
| 	return &withFootnoteBacklinkClass{a} | ||||
| } | ||||
|  | ||||
| const optFootnoteBacklinkHTML renderer.OptionName = "FootnoteBacklinkHTML" | ||||
|  | ||||
| type withFootnoteBacklinkHTML struct { | ||||
| 	value []byte | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteBacklinkHTML) SetConfig(c *renderer.Config) { | ||||
| 	c.Options[optFootnoteBacklinkHTML] = o.value | ||||
| } | ||||
|  | ||||
| func (o *withFootnoteBacklinkHTML) SetFootnoteOption(c *FootnoteConfig) { | ||||
| 	c.BacklinkHTML = o.value | ||||
| } | ||||
|  | ||||
| // WithFootnoteBacklinkHTML is a functional option that sets the HTML content for footnote backlinks. | ||||
| func WithFootnoteBacklinkHTML(a []byte) FootnoteOption { | ||||
| 	return &withFootnoteBacklinkHTML{a} | ||||
| } | ||||
|  | ||||
| // FootnoteHTMLRenderer is a renderer.NodeRenderer implementation that | ||||
| // renders FootnoteLink nodes. | ||||
| type FootnoteHTMLRenderer struct { | ||||
| 	html.Config | ||||
| 	FootnoteConfig | ||||
| } | ||||
|  | ||||
| // NewFootnoteHTMLRenderer returns a new FootnoteHTMLRenderer. | ||||
| func NewFootnoteHTMLRenderer(opts ...html.Option) renderer.NodeRenderer { | ||||
| func NewFootnoteHTMLRenderer(opts ...FootnoteOption) renderer.NodeRenderer { | ||||
| 	r := &FootnoteHTMLRenderer{ | ||||
| 		Config: html.NewConfig(), | ||||
| 		FootnoteConfig: NewFootnoteConfig(), | ||||
| 	} | ||||
| 	for _, opt := range opts { | ||||
| 		opt.SetHTMLOption(&r.Config) | ||||
| 		opt.SetFootnoteOption(&r.FootnoteConfig) | ||||
| 	} | ||||
| 	return r | ||||
| } | ||||
| @@ -234,7 +503,7 @@ func NewFootnoteHTMLRenderer(opts ...html.Option) renderer.NodeRenderer { | ||||
| // RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs. | ||||
| func (r *FootnoteHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) { | ||||
| 	reg.Register(ast.KindFootnoteLink, r.renderFootnoteLink) | ||||
| 	reg.Register(ast.KindFootnoteBackLink, r.renderFootnoteBackLink) | ||||
| 	reg.Register(ast.KindFootnoteBacklink, r.renderFootnoteBacklink) | ||||
| 	reg.Register(ast.KindFootnote, r.renderFootnote) | ||||
| 	reg.Register(ast.KindFootnoteList, r.renderFootnoteList) | ||||
| } | ||||
| @@ -243,25 +512,45 @@ func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byt | ||||
| 	if entering { | ||||
| 		n := node.(*ast.FootnoteLink) | ||||
| 		is := strconv.Itoa(n.Index) | ||||
| 		_, _ = w.WriteString(`<sup id="fnref:`) | ||||
| 		_, _ = w.WriteString(`<sup id="`) | ||||
| 		_, _ = w.Write(r.idPrefix(node)) | ||||
| 		_, _ = w.WriteString(`fnref:`) | ||||
| 		_, _ = w.WriteString(is) | ||||
| 		_, _ = w.WriteString(`"><a href="#fn:`) | ||||
| 		_, _ = w.WriteString(`"><a href="#`) | ||||
| 		_, _ = w.Write(r.idPrefix(node)) | ||||
| 		_, _ = w.WriteString(`fn:`) | ||||
| 		_, _ = w.WriteString(is) | ||||
| 		_, _ = w.WriteString(`" class="footnote-ref" role="doc-noteref">`) | ||||
| 		_, _ = w.WriteString(`" class="`) | ||||
| 		_, _ = w.Write(applyFootnoteTemplate(r.FootnoteConfig.LinkClass, | ||||
| 			n.Index, n.RefCount)) | ||||
| 		if len(r.FootnoteConfig.LinkTitle) > 0 { | ||||
| 			_, _ = w.WriteString(`" title="`) | ||||
| 			_, _ = w.Write(util.EscapeHTML(applyFootnoteTemplate(r.FootnoteConfig.LinkTitle, n.Index, n.RefCount))) | ||||
| 		} | ||||
| 		_, _ = w.WriteString(`" role="doc-noteref">`) | ||||
|  | ||||
| 		_, _ = w.WriteString(is) | ||||
| 		_, _ = w.WriteString(`</a></sup>`) | ||||
| 	} | ||||
| 	return gast.WalkContinue, nil | ||||
| } | ||||
|  | ||||
| func (r *FootnoteHTMLRenderer) renderFootnoteBackLink(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) { | ||||
| func (r *FootnoteHTMLRenderer) renderFootnoteBacklink(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) { | ||||
| 	if entering { | ||||
| 		n := node.(*ast.FootnoteBackLink) | ||||
| 		n := node.(*ast.FootnoteBacklink) | ||||
| 		is := strconv.Itoa(n.Index) | ||||
| 		_, _ = w.WriteString(` <a href="#fnref:`) | ||||
| 		_, _ = w.WriteString(` <a href="#`) | ||||
| 		_, _ = w.Write(r.idPrefix(node)) | ||||
| 		_, _ = w.WriteString(`fnref:`) | ||||
| 		_, _ = w.WriteString(is) | ||||
| 		_, _ = w.WriteString(`" class="footnote-backref" role="doc-backlink">`) | ||||
| 		_, _ = w.WriteString("↩︎") | ||||
| 		_, _ = w.WriteString(`" class="`) | ||||
| 		_, _ = w.Write(applyFootnoteTemplate(r.FootnoteConfig.BacklinkClass, n.Index, n.RefCount)) | ||||
| 		if len(r.FootnoteConfig.BacklinkTitle) > 0 { | ||||
| 			_, _ = w.WriteString(`" title="`) | ||||
| 			_, _ = w.Write(util.EscapeHTML(applyFootnoteTemplate(r.FootnoteConfig.BacklinkTitle, n.Index, n.RefCount))) | ||||
| 		} | ||||
| 		_, _ = w.WriteString(`" role="doc-backlink">`) | ||||
| 		_, _ = w.Write(applyFootnoteTemplate(r.FootnoteConfig.BacklinkHTML, n.Index, n.RefCount)) | ||||
| 		_, _ = w.WriteString(`</a>`) | ||||
| 	} | ||||
| 	return gast.WalkContinue, nil | ||||
| @@ -271,7 +560,9 @@ func (r *FootnoteHTMLRenderer) renderFootnote(w util.BufWriter, source []byte, n | ||||
| 	n := node.(*ast.Footnote) | ||||
| 	is := strconv.Itoa(n.Index) | ||||
| 	if entering { | ||||
| 		_, _ = w.WriteString(`<li id="fn:`) | ||||
| 		_, _ = w.WriteString(`<li id="`) | ||||
| 		_, _ = w.Write(r.idPrefix(node)) | ||||
| 		_, _ = w.WriteString(`fn:`) | ||||
| 		_, _ = w.WriteString(is) | ||||
| 		_, _ = w.WriteString(`" role="doc-endnote"`) | ||||
| 		if node.Attributes() != nil { | ||||
| @@ -312,11 +603,54 @@ func (r *FootnoteHTMLRenderer) renderFootnoteList(w util.BufWriter, source []byt | ||||
| 	return gast.WalkContinue, nil | ||||
| } | ||||
|  | ||||
| func (r *FootnoteHTMLRenderer) idPrefix(node gast.Node) []byte { | ||||
| 	if r.FootnoteConfig.IDPrefix != nil { | ||||
| 		return r.FootnoteConfig.IDPrefix | ||||
| 	} | ||||
| 	if r.FootnoteConfig.IDPrefixFunction != nil { | ||||
| 		return r.FootnoteConfig.IDPrefixFunction(node) | ||||
| 	} | ||||
| 	return []byte("") | ||||
| } | ||||
|  | ||||
| func applyFootnoteTemplate(b []byte, index, refCount int) []byte { | ||||
| 	fast := true | ||||
| 	for i, c := range b { | ||||
| 		if i != 0 { | ||||
| 			if b[i-1] == '^' && c == '^' { | ||||
| 				fast = false | ||||
| 				break | ||||
| 			} | ||||
| 			if b[i-1] == '%' && c == '%' { | ||||
| 				fast = false | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	if fast { | ||||
| 		return b | ||||
| 	} | ||||
| 	is := []byte(strconv.Itoa(index)) | ||||
| 	rs := []byte(strconv.Itoa(refCount)) | ||||
| 	ret := bytes.Replace(b, []byte("^^"), is, -1) | ||||
| 	return bytes.Replace(ret, []byte("%%"), rs, -1) | ||||
| } | ||||
|  | ||||
| type footnote struct { | ||||
| 	options []FootnoteOption | ||||
| } | ||||
|  | ||||
| // Footnote is an extension that allows you to use PHP Markdown Extra Footnotes. | ||||
| var Footnote = &footnote{} | ||||
| var Footnote = &footnote{ | ||||
| 	options: []FootnoteOption{}, | ||||
| } | ||||
|  | ||||
| // NewFootnote returns a new extension with given options. | ||||
| func NewFootnote(opts ...FootnoteOption) goldmark.Extender { | ||||
| 	return &footnote{ | ||||
| 		options: opts, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (e *footnote) Extend(m goldmark.Markdown) { | ||||
| 	m.Parser().AddOptions( | ||||
| @@ -331,6 +665,6 @@ func (e *footnote) Extend(m goldmark.Markdown) { | ||||
| 		), | ||||
| 	) | ||||
| 	m.Renderer().AddOptions(renderer.WithNodeRenderers( | ||||
| 		util.Prioritized(NewFootnoteHTMLRenderer(), 500), | ||||
| 		util.Prioritized(NewFootnoteHTMLRenderer(e.options...), 500), | ||||
| 	)) | ||||
| } | ||||
|   | ||||
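The footnote.go changes above switch the renderer constructor from plain html.Option to the new FootnoteOption interface and add per-document ID prefixes, link/backlink classes, titles, and configurable backlink HTML, where "^^" expands to the footnote number and "%%" to the reference number. A minimal sketch of how these options might be wired up, assuming the vendored goldmark v1.3.1 API introduced in this diff:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	md := goldmark.New(goldmark.WithExtensions(
		extension.NewFootnote(
			// Prefix keeps fn/fnref ids unique when several documents share a page.
			extension.WithFootnoteIDPrefix([]byte("doc1-")),
			// "^^" is replaced by the footnote number, "%%" by the reference number.
			extension.WithFootnoteBacklinkTitle([]byte("back to footnote ^^ (ref %%)")),
			extension.WithFootnoteBacklinkHTML([]byte("↩")),
		),
	))
	src := []byte("A claim.[^1]\n\n[^1]: Supporting detail.\n")
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	fmt.Println(buf.String())
}
```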
							
								
								
									
vendor/github.com/yuin/goldmark/extension/linkify.go | 4 (generated, vendored)
							| @@ -11,9 +11,9 @@ import ( | ||||
| 	"github.com/yuin/goldmark/util" | ||||
| ) | ||||
|  | ||||
| var wwwURLRegxp = regexp.MustCompile(`^www\.[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]+(?:(?:/|[#?])[-a-zA-Z0-9@:%_\+.~#!?&//=\(\);,'">\^{}\[\]` + "`" + `]*)?`) | ||||
| var wwwURLRegxp = regexp.MustCompile(`^www\.[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]+(?:[/#?][-a-zA-Z0-9@:%_\+.~#!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`) | ||||
|  | ||||
| var urlRegexp = regexp.MustCompile(`^(?:http|https|ftp):\/\/(?:www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]+(?:(?:/|[#?])[-a-zA-Z0-9@:%_+.~#$!?&//=\(\);,'">\^{}\[\]` + "`" + `]*)?`) | ||||
| var urlRegexp = regexp.MustCompile(`^(?:http|https|ftp)://[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]+(?::\d+)?(?:[/#?][-a-zA-Z0-9@:%_+.~#$!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`) | ||||
|  | ||||
| // An LinkifyConfig struct is a data structure that holds configuration of the | ||||
| // Linkify extension. | ||||
|   | ||||
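The linkify.go change rewrites both autolink patterns: the escaped and duplicated slashes in the path character class are dropped, and urlRegexp gains an optional port group `(?::\d+)?`, so bare URLs such as https://host:3000/path now linkify past the host. A rough illustration using a simplified path character class (the real pattern in linkify.go allows more characters):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Simplified variant of the new urlRegexp; the "(?::\d+)?" group is the
	// relevant addition, the path class here is intentionally shortened.
	re := regexp.MustCompile(`^(?:http|https|ftp)://[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]+(?::\d+)?(?:[/#?][-a-zA-Z0-9@:%_+.~#$!?&/=]*)?`)
	fmt.Println(re.FindString("https://gitea.com:3000/gitea/tea/pulls/332 trailing text"))
	// Prints the full URL including ":3000"; the pre-change pattern required
	// '/', '#' or '?' immediately after the TLD and stopped at "gitea.com".
}
```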
							
								
								
									
vendor/github.com/yuin/goldmark/extension/table.go | 102 (generated, vendored)
							| @@ -15,6 +15,13 @@ import ( | ||||
| 	"github.com/yuin/goldmark/util" | ||||
| ) | ||||
|  | ||||
| var escapedPipeCellListKey = parser.NewContextKey() | ||||
|  | ||||
| type escapedPipeCell struct { | ||||
| 	Cell *ast.TableCell | ||||
| 	Pos  []int | ||||
| } | ||||
|  | ||||
| // TableCellAlignMethod indicates how table cells are aligned in HTML format. | ||||
| type TableCellAlignMethod int | ||||
|  | ||||
| @@ -148,7 +155,7 @@ func (b *tableParagraphTransformer) Transform(node *gast.Paragraph, reader text. | ||||
| 		if alignments == nil { | ||||
| 			continue | ||||
| 		} | ||||
| 		header := b.parseRow(lines.At(i-1), alignments, true, reader) | ||||
| 		header := b.parseRow(lines.At(i-1), alignments, true, reader, pc) | ||||
| 		if header == nil || len(alignments) != header.ChildCount() { | ||||
| 			return | ||||
| 		} | ||||
| @@ -156,7 +163,7 @@ func (b *tableParagraphTransformer) Transform(node *gast.Paragraph, reader text. | ||||
| 		table.Alignments = alignments | ||||
| 		table.AppendChild(table, ast.NewTableHeader(header)) | ||||
| 		for j := i + 1; j < lines.Len(); j++ { | ||||
| 			table.AppendChild(table, b.parseRow(lines.At(j), alignments, false, reader)) | ||||
| 			table.AppendChild(table, b.parseRow(lines.At(j), alignments, false, reader, pc)) | ||||
| 		} | ||||
| 		node.Lines().SetSliced(0, i-1) | ||||
| 		node.Parent().InsertAfter(node.Parent(), node, table) | ||||
| @@ -170,7 +177,7 @@ func (b *tableParagraphTransformer) Transform(node *gast.Paragraph, reader text. | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (b *tableParagraphTransformer) parseRow(segment text.Segment, alignments []ast.Alignment, isHeader bool, reader text.Reader) *ast.TableRow { | ||||
| func (b *tableParagraphTransformer) parseRow(segment text.Segment, alignments []ast.Alignment, isHeader bool, reader text.Reader, pc parser.Context) *ast.TableRow { | ||||
| 	source := reader.Source() | ||||
| 	line := segment.Value(source) | ||||
| 	pos := 0 | ||||
| @@ -194,18 +201,39 @@ func (b *tableParagraphTransformer) parseRow(segment text.Segment, alignments [] | ||||
| 		} else { | ||||
| 			alignment = alignments[i] | ||||
| 		} | ||||
| 		closure := util.FindClosure(line[pos:], byte(0), '|', true, false) | ||||
| 		if closure < 0 { | ||||
| 			closure = len(line[pos:]) | ||||
| 		} | ||||
|  | ||||
| 		var escapedCell *escapedPipeCell | ||||
| 		node := ast.NewTableCell() | ||||
| 		seg := text.NewSegment(segment.Start+pos, segment.Start+pos+closure) | ||||
| 		node.Alignment = alignment | ||||
| 		hasBacktick := false | ||||
| 		closure := pos | ||||
| 		for ; closure < limit; closure++ { | ||||
| 			if line[closure] == '`' { | ||||
| 				hasBacktick = true | ||||
| 			} | ||||
| 			if line[closure] == '|' { | ||||
| 				if closure == 0 || line[closure-1] != '\\' { | ||||
| 					break | ||||
| 				} else if hasBacktick { | ||||
| 					if escapedCell == nil { | ||||
| 						escapedCell = &escapedPipeCell{node, []int{}} | ||||
| 						escapedList := pc.ComputeIfAbsent(escapedPipeCellListKey, | ||||
| 							func() interface{} { | ||||
| 								return []*escapedPipeCell{} | ||||
| 							}).([]*escapedPipeCell) | ||||
| 						escapedList = append(escapedList, escapedCell) | ||||
| 						pc.Set(escapedPipeCellListKey, escapedList) | ||||
| 					} | ||||
| 					escapedCell.Pos = append(escapedCell.Pos, segment.Start+closure-1) | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 		seg := text.NewSegment(segment.Start+pos, segment.Start+closure) | ||||
| 		seg = seg.TrimLeftSpace(source) | ||||
| 		seg = seg.TrimRightSpace(source) | ||||
| 		node.Lines().Append(seg) | ||||
| 		node.Alignment = alignment | ||||
| 		row.AppendChild(row, node) | ||||
| 		pos += closure + 1 | ||||
| 		pos = closure + 1 | ||||
| 	} | ||||
| 	for ; i < len(alignments); i++ { | ||||
| 		row.AppendChild(row, ast.NewTableCell()) | ||||
| @@ -243,6 +271,49 @@ func (b *tableParagraphTransformer) parseDelimiter(segment text.Segment, reader | ||||
| 	return alignments | ||||
| } | ||||
|  | ||||
| type tableASTTransformer struct { | ||||
| } | ||||
|  | ||||
| var defaultTableASTTransformer = &tableASTTransformer{} | ||||
|  | ||||
| // NewTableASTTransformer returns a parser.ASTTransformer for tables. | ||||
| func NewTableASTTransformer() parser.ASTTransformer { | ||||
| 	return defaultTableASTTransformer | ||||
| } | ||||
|  | ||||
| func (a *tableASTTransformer) Transform(node *gast.Document, reader text.Reader, pc parser.Context) { | ||||
| 	lst := pc.Get(escapedPipeCellListKey) | ||||
| 	if lst == nil { | ||||
| 		return | ||||
| 	} | ||||
| 	pc.Set(escapedPipeCellListKey, nil) | ||||
| 	for _, v := range lst.([]*escapedPipeCell) { | ||||
| 		_ = gast.Walk(v.Cell, func(n gast.Node, entering bool) (gast.WalkStatus, error) { | ||||
| 			if n.Kind() != gast.KindCodeSpan { | ||||
| 				return gast.WalkContinue, nil | ||||
| 			} | ||||
| 			c := n.FirstChild() | ||||
| 			for c != nil { | ||||
| 				next := c.NextSibling() | ||||
| 				if c.Kind() == gast.KindText { | ||||
| 					t := c.(*gast.Text) | ||||
| 					for _, pos := range v.Pos { | ||||
| 						if t.Segment.Start <= pos && t.Segment.Stop > pos { | ||||
| 							n1 := gast.NewRawTextSegment(t.Segment.WithStop(pos)) | ||||
| 							n2 := gast.NewRawTextSegment(t.Segment.WithStart(pos + 1)) | ||||
| 							n.InsertAfter(n, c, n1) | ||||
| 							n.InsertAfter(n, n1, n2) | ||||
| 							n.RemoveChild(n, c) | ||||
| 						} | ||||
| 					} | ||||
| 				} | ||||
| 				c = next | ||||
| 			} | ||||
| 			return gast.WalkContinue, nil | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // TableHTMLRenderer is a renderer.NodeRenderer implementation that | ||||
| // renders Table nodes. | ||||
| type TableHTMLRenderer struct { | ||||
| @@ -419,7 +490,7 @@ func (r *TableHTMLRenderer) renderTableCell(w util.BufWriter, source []byte, nod | ||||
| 					cob.AppendByte(';') | ||||
| 				} | ||||
| 				style := fmt.Sprintf("text-align:%s", n.Alignment.String()) | ||||
| 				cob.Append(util.StringToReadOnlyBytes(style)) | ||||
| 				cob.AppendString(style) | ||||
| 				n.SetAttributeString("style", cob.Bytes()) | ||||
| 			} | ||||
| 		} | ||||
| @@ -454,9 +525,14 @@ func NewTable(opts ...TableOption) goldmark.Extender { | ||||
| } | ||||
|  | ||||
| func (e *table) Extend(m goldmark.Markdown) { | ||||
| 	m.Parser().AddOptions(parser.WithParagraphTransformers( | ||||
| 	m.Parser().AddOptions( | ||||
| 		parser.WithParagraphTransformers( | ||||
| 			util.Prioritized(NewTableParagraphTransformer(), 200), | ||||
| 	)) | ||||
| 		), | ||||
| 		parser.WithASTTransformers( | ||||
| 			util.Prioritized(defaultTableASTTransformer, 0), | ||||
| 		), | ||||
| 	) | ||||
| 	m.Renderer().AddOptions(renderer.WithNodeRenderers( | ||||
| 		util.Prioritized(NewTableHTMLRenderer(e.options...), 500), | ||||
| 	)) | ||||
|   | ||||
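The table.go changes track backslash-escaped pipes while splitting a row into cells and register a new AST transformer that later splits code-span text at the recorded positions, so a `\|` inside backticks stays a literal pipe instead of ending the cell. A small sketch of the resulting behaviour, assuming the vendored goldmark v1.3.1 with this patch:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/extension"
)

func main() {
	// The escaped pipe inside the code span should survive as a literal '|'.
	src := []byte("| expr | note |\n| --- | --- |\n| `a \\| b` | pipe kept |\n")
	md := goldmark.New(goldmark.WithExtensions(extension.Table))
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	// Expect the first body cell to render roughly as <code>a | b</code>.
	fmt.Println(buf.String())
}
```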
							
								
								
									
vendor/github.com/yuin/goldmark/go.mod | 2 (generated, vendored)
							| @@ -1,3 +1,3 @@ | ||||
| module github.com/yuin/goldmark | ||||
|  | ||||
| go 1.13 | ||||
| go 1.15 | ||||
|   | ||||
							
								
								
									
vendor/github.com/yuin/goldmark/parser/link.go | 8 (generated, vendored)
							| @@ -2,7 +2,6 @@ package parser | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"regexp" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/yuin/goldmark/ast" | ||||
| @@ -113,8 +112,6 @@ func (s *linkParser) Trigger() []byte { | ||||
| 	return []byte{'!', '[', ']'} | ||||
| } | ||||
|  | ||||
| var linkDestinationRegexp = regexp.MustCompile(`\s*([^\s].+)`) | ||||
| var linkTitleRegexp = regexp.MustCompile(`\s+(\)|["'\(].+)`) | ||||
| var linkBottom = NewContextKey() | ||||
|  | ||||
| func (s *linkParser) Parse(parent ast.Node, block text.Reader, pc Context) ast.Node { | ||||
| @@ -293,20 +290,17 @@ func (s *linkParser) parseLink(parent ast.Node, last *linkLabelState, block text | ||||
| func parseLinkDestination(block text.Reader) ([]byte, bool) { | ||||
| 	block.SkipSpaces() | ||||
| 	line, _ := block.PeekLine() | ||||
| 	buf := []byte{} | ||||
| 	if block.Peek() == '<' { | ||||
| 		i := 1 | ||||
| 		for i < len(line) { | ||||
| 			c := line[i] | ||||
| 			if c == '\\' && i < len(line)-1 && util.IsPunct(line[i+1]) { | ||||
| 				buf = append(buf, '\\', line[i+1]) | ||||
| 				i += 2 | ||||
| 				continue | ||||
| 			} else if c == '>' { | ||||
| 				block.Advance(i + 1) | ||||
| 				return line[1:i], true | ||||
| 			} | ||||
| 			buf = append(buf, c) | ||||
| 			i++ | ||||
| 		} | ||||
| 		return nil, false | ||||
| @@ -316,7 +310,6 @@ func parseLinkDestination(block text.Reader) ([]byte, bool) { | ||||
| 	for i < len(line) { | ||||
| 		c := line[i] | ||||
| 		if c == '\\' && i < len(line)-1 && util.IsPunct(line[i+1]) { | ||||
| 			buf = append(buf, '\\', line[i+1]) | ||||
| 			i += 2 | ||||
| 			continue | ||||
| 		} else if c == '(' { | ||||
| @@ -329,7 +322,6 @@ func parseLinkDestination(block text.Reader) ([]byte, bool) { | ||||
| 		} else if util.IsSpace(c) { | ||||
| 			break | ||||
| 		} | ||||
| 		buf = append(buf, c) | ||||
| 		i++ | ||||
| 	} | ||||
| 	block.Advance(i) | ||||
|   | ||||
							
								
								
									
vendor/github.com/yuin/goldmark/parser/parser.go | 12 (generated, vendored)
							| @@ -138,6 +138,9 @@ type Context interface { | ||||
| 	// Get returns a value associated with the given key. | ||||
| 	Get(ContextKey) interface{} | ||||
|  | ||||
| 	// ComputeIfAbsent computes a value if a value associated with the given key is absent and returns the value. | ||||
| 	ComputeIfAbsent(ContextKey, func() interface{}) interface{} | ||||
|  | ||||
| 	// Set sets the given value to the context. | ||||
| 	Set(ContextKey, interface{}) | ||||
|  | ||||
| @@ -252,6 +255,15 @@ func (p *parseContext) Get(key ContextKey) interface{} { | ||||
| 	return p.store[key] | ||||
| } | ||||
|  | ||||
| func (p *parseContext) ComputeIfAbsent(key ContextKey, f func() interface{}) interface{} { | ||||
| 	v := p.store[key] | ||||
| 	if v == nil { | ||||
| 		v = f() | ||||
| 		p.store[key] = v | ||||
| 	} | ||||
| 	return v | ||||
| } | ||||
|  | ||||
| func (p *parseContext) Set(key ContextKey, value interface{}) { | ||||
| 	p.store[key] = value | ||||
| } | ||||
|   | ||||
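parser.Context gains ComputeIfAbsent, which the table transformer uses to lazily create the per-parse list of escaped-pipe cells. A minimal sketch of the same pattern; the key name and payload type here are hypothetical:

```go
package example

import "github.com/yuin/goldmark/parser"

// myListKey is a hypothetical per-parse context key.
var myListKey = parser.NewContextKey()

// record appends an item to a slice stored in the parse context,
// creating the slice on first use via ComputeIfAbsent.
func record(pc parser.Context, item string) {
	lst := pc.ComputeIfAbsent(myListKey, func() interface{} {
		return []string{}
	}).([]string)
	lst = append(lst, item)
	pc.Set(myListKey, lst)
}
```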
							
								
								
									
vendor/github.com/yuin/goldmark/parser/raw_html.go | 9 (generated, vendored)
							| @@ -2,10 +2,11 @@ package parser | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"regexp" | ||||
|  | ||||
| 	"github.com/yuin/goldmark/ast" | ||||
| 	"github.com/yuin/goldmark/text" | ||||
| 	"github.com/yuin/goldmark/util" | ||||
| 	"regexp" | ||||
| ) | ||||
|  | ||||
| type rawHTMLParser struct { | ||||
| @@ -67,8 +68,6 @@ func (s *rawHTMLParser) parseSingleLineRegexp(reg *regexp.Regexp, block text.Rea | ||||
| 	return node | ||||
| } | ||||
|  | ||||
| var dummyMatch = [][]byte{} | ||||
|  | ||||
| func (s *rawHTMLParser) parseMultiLineRegexp(reg *regexp.Regexp, block text.Reader, pc Context) ast.Node { | ||||
| 	sline, ssegment := block.Position() | ||||
| 	if block.Match(reg) { | ||||
| @@ -102,7 +101,3 @@ func (s *rawHTMLParser) parseMultiLineRegexp(reg *regexp.Regexp, block text.Read | ||||
| 	} | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (s *rawHTMLParser) CloseBlock(parent ast.Node, pc Context) { | ||||
| 	// nothing to do | ||||
| } | ||||
|   | ||||
							
								
								
									
vendor/github.com/yuin/goldmark/util/util.go | 16 (generated, vendored)
							| @@ -37,6 +37,12 @@ func (b *CopyOnWriteBuffer) Write(value []byte) { | ||||
| 	b.buffer = append(b.buffer, value...) | ||||
| } | ||||
|  | ||||
| // WriteString writes given string to the buffer. | ||||
| // WriteString allocates a new buffer and clears it the first time. | ||||
| func (b *CopyOnWriteBuffer) WriteString(value string) { | ||||
| 	b.Write(StringToReadOnlyBytes(value)) | ||||
| } | ||||
|  | ||||
| // Append appends given bytes to the buffer. | ||||
| // Append copy buffer at the first time. | ||||
| func (b *CopyOnWriteBuffer) Append(value []byte) { | ||||
| @@ -49,6 +55,12 @@ func (b *CopyOnWriteBuffer) Append(value []byte) { | ||||
| 	b.buffer = append(b.buffer, value...) | ||||
| } | ||||
|  | ||||
| // AppendString appends given string to the buffer. | ||||
| // AppendString copies the buffer the first time. | ||||
| func (b *CopyOnWriteBuffer) AppendString(value string) { | ||||
| 	b.Append(StringToReadOnlyBytes(value)) | ||||
| } | ||||
|  | ||||
| // WriteByte writes the given byte to the buffer. | ||||
| // WriteByte allocate new buffer and clears it at the first time. | ||||
| func (b *CopyOnWriteBuffer) WriteByte(c byte) { | ||||
| @@ -804,7 +816,7 @@ func IsPunct(c byte) bool { | ||||
| 	return punctTable[c] == 1 | ||||
| } | ||||
|  | ||||
| // IsPunct returns true if the given rune is a punctuation, otherwise false. | ||||
| // IsPunctRune returns true if the given rune is a punctuation, otherwise false. | ||||
| func IsPunctRune(r rune) bool { | ||||
| 	return int32(r) <= 256 && IsPunct(byte(r)) || unicode.IsPunct(r) | ||||
| } | ||||
| @@ -814,7 +826,7 @@ func IsSpace(c byte) bool { | ||||
| 	return spaceTable[c] == 1 | ||||
| } | ||||
|  | ||||
| // IsSpace returns true if the given rune is a space, otherwise false. | ||||
| // IsSpaceRune returns true if the given rune is a space, otherwise false. | ||||
| func IsSpaceRune(r rune) bool { | ||||
| 	return int32(r) <= 256 && IsSpace(byte(r)) || unicode.IsSpace(r) | ||||
| } | ||||
|   | ||||
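util.go adds WriteString and AppendString to CopyOnWriteBuffer, thin wrappers over StringToReadOnlyBytes that the table renderer now uses when building the inline style attribute. A short sketch of the copy-on-write append, assuming NewCopyOnWriteBuffer keeps its existing signature in the vendored copy:

```go
package example

import (
	"fmt"

	"github.com/yuin/goldmark/util"
)

// appendTextAlign mirrors the renderer change: extend an existing style
// value with a text-align declaration, copying the buffer only when needed.
func appendTextAlign(existing []byte, align string) []byte {
	cob := util.NewCopyOnWriteBuffer(existing)
	if len(existing) > 0 {
		cob.AppendByte(';')
	}
	// AppendString replaces the previous Append(util.StringToReadOnlyBytes(...)) call.
	cob.AppendString(fmt.Sprintf("text-align:%s", align))
	return cob.Bytes()
}
```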
							
								
								
									
vendor/modules.txt | 21 (vendored)
							| @@ -15,7 +15,7 @@ github.com/Microsoft/go-winio | ||||
| github.com/Microsoft/go-winio/pkg/guid | ||||
| # github.com/adrg/xdg v0.3.1 | ||||
| github.com/adrg/xdg | ||||
| # github.com/alecthomas/chroma v0.7.3 | ||||
| # github.com/alecthomas/chroma v0.8.1 | ||||
| github.com/alecthomas/chroma | ||||
| github.com/alecthomas/chroma/formatters | ||||
| github.com/alecthomas/chroma/formatters/html | ||||
| @@ -47,13 +47,18 @@ github.com/alecthomas/chroma/lexers/v | ||||
| github.com/alecthomas/chroma/lexers/w | ||||
| github.com/alecthomas/chroma/lexers/x | ||||
| github.com/alecthomas/chroma/lexers/y | ||||
| github.com/alecthomas/chroma/lexers/z | ||||
| github.com/alecthomas/chroma/quick | ||||
| github.com/alecthomas/chroma/styles | ||||
| # github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e | ||||
| github.com/araddon/dateparse | ||||
| # github.com/charmbracelet/glamour v0.2.0 | ||||
| # github.com/aymerick/douceur v0.2.0 | ||||
| github.com/aymerick/douceur/css | ||||
| # github.com/charmbracelet/glamour v0.2.0 => github.com/noerw/glamour v0.2.1-0.20210305125354-f0a29f1de0c2 | ||||
| github.com/charmbracelet/glamour | ||||
| github.com/charmbracelet/glamour/ansi | ||||
| # github.com/chris-ramon/douceur v0.2.0 | ||||
| github.com/chris-ramon/douceur/parser | ||||
| # github.com/cpuguy83/go-md2man/v2 v2.0.0 | ||||
| github.com/cpuguy83/go-md2man/v2/md2man | ||||
| # github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 | ||||
| @@ -124,6 +129,8 @@ github.com/go-git/go-git/v5/utils/merkletrie/filesystem | ||||
| github.com/go-git/go-git/v5/utils/merkletrie/index | ||||
| github.com/go-git/go-git/v5/utils/merkletrie/internal/frame | ||||
| github.com/go-git/go-git/v5/utils/merkletrie/noder | ||||
| # github.com/gorilla/css v1.0.0 | ||||
| github.com/gorilla/css/scanner | ||||
| # github.com/hashicorp/go-version v1.2.1 | ||||
| github.com/hashicorp/go-version | ||||
| # github.com/imdario/mergo v0.3.11 | ||||
| @@ -144,11 +151,11 @@ github.com/mattn/go-isatty | ||||
| github.com/mattn/go-runewidth | ||||
| # github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d | ||||
| github.com/mgutz/ansi | ||||
| # github.com/microcosm-cc/bluemonday v1.0.2 | ||||
| # github.com/microcosm-cc/bluemonday v1.0.4 | ||||
| github.com/microcosm-cc/bluemonday | ||||
| # github.com/mitchellh/go-homedir v1.1.0 | ||||
| github.com/mitchellh/go-homedir | ||||
| # github.com/muesli/reflow v0.1.0 | ||||
| # github.com/muesli/reflow v0.2.0 | ||||
| github.com/muesli/reflow/ansi | ||||
| github.com/muesli/reflow/indent | ||||
| github.com/muesli/reflow/padding | ||||
| @@ -173,7 +180,7 @@ github.com/stretchr/testify/assert | ||||
| github.com/urfave/cli/v2 | ||||
| # github.com/xanzy/ssh-agent v0.3.0 | ||||
| github.com/xanzy/ssh-agent | ||||
| # github.com/yuin/goldmark v1.2.1 | ||||
| # github.com/yuin/goldmark v1.3.1 | ||||
| github.com/yuin/goldmark | ||||
| github.com/yuin/goldmark/ast | ||||
| github.com/yuin/goldmark/extension | ||||
| @@ -183,6 +190,10 @@ github.com/yuin/goldmark/renderer | ||||
| github.com/yuin/goldmark/renderer/html | ||||
| github.com/yuin/goldmark/text | ||||
| github.com/yuin/goldmark/util | ||||
| # github.com/yuin/goldmark-emoji v1.0.1 | ||||
| github.com/yuin/goldmark-emoji | ||||
| github.com/yuin/goldmark-emoji/ast | ||||
| github.com/yuin/goldmark-emoji/definition | ||||
| # golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 | ||||
| golang.org/x/crypto/blowfish | ||||
| golang.org/x/crypto/cast5 | ||||
|   | ||||