chore: modernize CI and update Go toolchain

- Bump Go from 1.19 to 1.26 and update all dependencies
- Rewrite CI workflow with matrix strategy (Linux, macOS, Windows)
- Update GitHub Actions to current versions (checkout@v4, setup-go@v5)
- Update CodeQL actions from v1 to v3
- Fix cross-platform bug in mock/path.go (path.Join -> filepath.Join)
- Clean up dependabot config (weekly schedule, remove stale ignore)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Christopher Allen Lane
2026-02-14 20:58:51 -05:00
parent cc85a4bdb1
commit 2a19755804
657 changed files with 49050 additions and 32001 deletions

View File

@@ -1,6 +1,7 @@
package chroma
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
@@ -135,11 +136,20 @@ func NewLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) {
}
// Trace enables debug tracing and returns the lexer to allow chaining.
//
// Deprecated: Use SetTracing instead.
func (r *RegexLexer) Trace(trace bool) *RegexLexer {
	r.SetTracing(trace)
	return r
}
// SetTracing turns debug tracing on or off for this lexer.
//
// This complies with the [TracingLexer] interface.
func (r *RegexLexer) SetTracing(enabled bool) {
	r.trace = enabled
}
// A CompiledRule is a Rule with a pre-compiled regex.
//
// Note that regular expressions are lazily compiled on first use of the lexer.
@@ -185,6 +195,7 @@ func (l *LexerState) Get(key interface{}) interface{} {
// Iterator returns the next Token from the lexer.
func (l *LexerState) Iterator() Token { // nolint: gocognit
trace := json.NewEncoder(os.Stderr)
end := len(l.Text)
if l.newlineAdded {
end--
@@ -194,6 +205,9 @@ func (l *LexerState) Iterator() Token { // nolint: gocognit
for len(l.iteratorStack) > 0 {
n := len(l.iteratorStack) - 1
t := l.iteratorStack[n]()
if t.Type == Ignore {
continue
}
if t == EOF {
l.iteratorStack = l.iteratorStack[:n]
continue
@@ -202,14 +216,33 @@ func (l *LexerState) Iterator() Token { // nolint: gocognit
}
l.State = l.Stack[len(l.Stack)-1]
if l.Lexer.trace {
fmt.Fprintf(os.Stderr, "%s: pos=%d, text=%q\n", l.State, l.Pos, string(l.Text[l.Pos:]))
}
selectedRule, ok := l.Rules[l.State]
if !ok {
panic("unknown state " + l.State)
}
var start time.Time
if l.Lexer.trace {
start = time.Now()
}
ruleIndex, rule, groups, namedGroups := matchRules(l.Text, l.Pos, selectedRule)
if l.Lexer.trace {
var length int
if groups != nil {
length = len(groups[0])
} else {
length = -1
}
_ = trace.Encode(Trace{ //nolint
Lexer: l.Lexer.config.Name,
State: l.State,
Rule: ruleIndex,
Pattern: rule.Pattern,
Pos: l.Pos,
Length: length,
Elapsed: float64(time.Since(start)) / float64(time.Millisecond),
})
// fmt.Fprintf(os.Stderr, "%s: pos=%d, text=%q, elapsed=%s\n", l.State, l.Pos, string(l.Text[l.Pos:]), time.Since(start))
}
// No match.
if groups == nil {
// From Pygments :\
@@ -243,6 +276,9 @@ func (l *LexerState) Iterator() Token { // nolint: gocognit
for len(l.iteratorStack) > 0 {
n := len(l.iteratorStack) - 1
t := l.iteratorStack[n]()
if t.Type == Ignore {
continue
}
if t == EOF {
l.iteratorStack = l.iteratorStack[:n]
continue
@@ -343,7 +379,7 @@ func (r *RegexLexer) maybeCompile() (err error) {
restart:
seen := map[LexerMutator]bool{}
for state := range r.rules {
for i := 0; i < len(r.rules[state]); i++ {
for i := range len(r.rules[state]) {
rule := r.rules[state][i]
if compile, ok := rule.Mutator.(LexerMutator); ok {
if seen[compile] {
@@ -360,6 +396,17 @@ restart:
}
}
}
// Validate emitters
for state := range r.rules {
for i := range len(r.rules[state]) {
rule := r.rules[state][i]
if validate, ok := rule.Type.(ValidatingEmitter); ok {
if err := validate.ValidateEmitter(rule); err != nil {
return fmt.Errorf("%s: %s: %s: %w", r.config.Name, state, rule.Pattern, err)
}
}
}
}
r.compiled = true
return nil
}
@@ -468,7 +515,7 @@ func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule
func ensureLF(text string) string {
buf := make([]byte, len(text))
var j int
for i := 0; i < len(text); i++ {
for i := range len(text) {
c := text[i]
if c == '\r' {
if i < len(text)-1 && text[i+1] == '\n' {