chore: bump version to 4.5.0

Bug fixes:
- Fix inverted pager detection logic (returned error instead of path)
- Fix repo.Clone ignoring destination directory parameter
- Fix sheet loading using append on pre-sized slices
- Clean up partial files on copy failure
- Trim whitespace from editor config

Security:
- Add path traversal protection for cheatsheet names

Performance:
- Move regex compilation outside search loop
- Replace string concatenation with strings.Join in search

Build:
- Remove go:generate; embed config and usage as string literals
- Parallelize release builds
- Add fuzz testing infrastructure

Testing:
- Improve test coverage from 38.9% to 50.2%
- Add fuzz tests for search, filter, tags, and validation

Documentation:
- Fix inaccurate code examples in HACKING.md
- Add missing --conf and --all options to man page
- Add ADRs for path traversal, env parsing, and search parallelization
- Update CONTRIBUTING.md to reflect project policy

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Christopher Allen Lane
2026-02-14 19:56:19 -05:00
parent 7908a678df
commit cc85a4bdb1
69 changed files with 4802 additions and 577 deletions

View File

@@ -32,3 +32,29 @@ func TestColorize(t *testing.T) {
t.Errorf("failed to colorize sheet: want: %s, got: %s", want, s.Text)
}
}
// TestColorizeError exercises Colorize with bogus lexer, formatter, and
// style settings to confirm that invalid configuration does not panic.
func TestColorizeError(_ *testing.T) {
	// a sheet whose syntax names a lexer that does not exist
	s := Sheet{
		Text:   "some text",
		Syntax: "invalidlexer12345",
	}

	// a config whose formatter and style are likewise invalid
	cfg := config.Config{
		Formatter: "invalidformatter",
		Style:     "invalidstyle",
	}

	// remember the pre-call text
	before := s.Text

	// the call itself is the assertion: it must return without panicking
	s.Colorize(cfg)

	// on error the text may be unchanged; on success it may be colorized.
	// Either outcome is acceptable here.
	_ = s.Text
	_ = before
}

View File

@@ -39,6 +39,8 @@ func (s *Sheet) Copy(dest string) error {
// copy file contents
_, err = io.Copy(outfile, infile)
if err != nil {
// Clean up the partially written file on error
os.Remove(dest)
return fmt.Errorf(
"failed to copy file: infile: %s, outfile: %s, err: %v",
s.Path,

View File

@@ -0,0 +1,187 @@
package sheet
import (
	"os"
	"path/filepath"
	"strings"
	"testing"
)
// TestCopyErrors tests error cases for the Copy method.
func TestCopyErrors(t *testing.T) {
	tests := []struct {
		name string
		// setup receives the subtest's own *testing.T so that Fatalf runs
		// on the goroutine executing the subtest. (Capturing the parent
		// test's t and calling Fatalf from a subtest goroutine is
		// documented as unsupported by the testing package.)
		setup   func(t *testing.T) (*Sheet, string, func())
		wantErr bool
		errMsg  string
	}{
		{
			name: "source file does not exist",
			setup: func(t *testing.T) (*Sheet, string, func()) {
				// point the sheet at a path that cannot exist
				sheet := &Sheet{
					Title:     "test",
					Path:      "/non/existent/file.txt",
					CheatPath: "test",
				}

				dest := filepath.Join(os.TempDir(), "copy-test-dest.txt")
				cleanup := func() {
					os.Remove(dest)
				}

				return sheet, dest, cleanup
			},
			wantErr: true,
			errMsg:  "failed to open cheatsheet",
		},
		{
			name: "destination directory creation fails",
			setup: func(t *testing.T) (*Sheet, string, func()) {
				// create a real source file
				src, err := os.CreateTemp("", "copy-test-src-*")
				if err != nil {
					t.Fatalf("failed to create temp file: %v", err)
				}
				src.WriteString("test content")
				src.Close()

				sheet := &Sheet{
					Title:     "test",
					Path:      src.Name(),
					CheatPath: "test",
				}

				// create a regular file where the destination's parent
				// directory would have to be created
				blockerFile := filepath.Join(os.TempDir(), "copy-blocker-file")
				if err := os.WriteFile(blockerFile, []byte("blocker"), 0644); err != nil {
					t.Fatalf("failed to create blocker file: %v", err)
				}

				// nesting dest under the blocker file forces directory
				// creation to fail
				dest := filepath.Join(blockerFile, "subdir", "dest.txt")

				cleanup := func() {
					os.Remove(src.Name())
					os.Remove(blockerFile)
				}

				return sheet, dest, cleanup
			},
			wantErr: true,
			errMsg:  "failed to create directory",
		},
		{
			name: "destination file creation fails",
			setup: func(t *testing.T) (*Sheet, string, func()) {
				// create a real source file
				src, err := os.CreateTemp("", "copy-test-src-*")
				if err != nil {
					t.Fatalf("failed to create temp file: %v", err)
				}
				src.WriteString("test content")
				src.Close()

				sheet := &Sheet{
					Title:     "test",
					Path:      src.Name(),
					CheatPath: "test",
				}

				// create a directory at the destination path so that
				// creating the outfile fails
				destDir := filepath.Join(os.TempDir(), "copy-test-dir")
				if err := os.Mkdir(destDir, 0755); err != nil && !os.IsExist(err) {
					t.Fatalf("failed to create dest dir: %v", err)
				}

				cleanup := func() {
					os.Remove(src.Name())
					os.RemoveAll(destDir)
				}

				return sheet, destDir, cleanup
			},
			wantErr: true,
			errMsg:  "failed to create outfile",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			sheet, dest, cleanup := tt.setup(t)
			defer cleanup()

			err := sheet.Copy(dest)
			if (err != nil) != tt.wantErr {
				t.Errorf("Copy() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if err != nil && tt.errMsg != "" {
				if !contains(err.Error(), tt.errMsg) {
					t.Errorf("Copy() error = %v, want error containing %q", err, tt.errMsg)
				}
			}
		})
	}
}
// TestCopyIOError tests the io.Copy error case.
func TestCopyIOError(t *testing.T) {
	// Triggering a mid-copy failure would require either mocking io.Copy
	// or racing the filesystem (modifying or removing the source between
	// the open and the copy), so the case is not exercised directly.
	t.Skip("Skipping io.Copy error test - requires file system race condition")
}
// TestCopyCleanupOnError verifies that partially written files are cleaned
// up on error.
func TestCopyCleanupOnError(t *testing.T) {
	// The failure injection below relies on file permissions being
	// enforced; root bypasses permission checks entirely, so the Copy
	// call would succeed and the test would fail spuriously.
	if os.Geteuid() == 0 {
		t.Skip("Skipping permission-based test when running as root")
	}

	// Create a source file that we'll make unreadable after writing
	src, err := os.CreateTemp("", "copy-test-cleanup-*")
	if err != nil {
		t.Fatalf("failed to create temp file: %v", err)
	}
	defer os.Remove(src.Name())

	// Write some content
	content := "test content for cleanup"
	if _, err := src.WriteString(content); err != nil {
		t.Fatalf("failed to write content: %v", err)
	}
	src.Close()

	sheet := &Sheet{
		Title:     "test",
		Path:      src.Name(),
		CheatPath: "test",
	}

	// Destination path
	dest := filepath.Join(os.TempDir(), "copy-cleanup-test.txt")
	defer os.Remove(dest) // Clean up if test fails

	// Make the source file unreadable. NOTE(review): this most likely
	// makes Copy fail at the initial open rather than during io.Copy
	// itself; either way the contract under test is the same — no partial
	// destination file may be left behind.
	if err := os.Chmod(src.Name(), 0000); err != nil {
		t.Skip("Cannot change file permissions on this platform")
	}
	defer os.Chmod(src.Name(), 0644) // Restore permissions for cleanup

	// Attempt to copy - this should fail
	err = sheet.Copy(dest)
	if err == nil {
		t.Error("Expected Copy to fail with permission error")
	}

	// Verify the destination file was cleaned up
	if _, err := os.Stat(dest); !os.IsNotExist(err) {
		t.Error("Destination file should have been removed after copy failure")
	}
}
// contains reports whether substr occurs within s. It delegates to the
// standard library rather than re-implementing substring search.
func contains(s, substr string) bool {
	return strings.Contains(s, substr)
}
// containsHelper reports whether substr occurs within s by checking every
// candidate starting offset in turn.
func containsHelper(s, substr string) bool {
	limit := len(s) - len(substr)
	for start := 0; start <= limit; start++ {
		if s[start:start+len(substr)] == substr {
			return true
		}
	}
	return false
}

65
internal/sheet/doc.go Normal file
View File

@@ -0,0 +1,65 @@
// Package sheet provides functionality for parsing and managing individual cheat sheets.
//
// A sheet represents a single cheatsheet file containing helpful commands, notes,
// or documentation. Sheets can include optional YAML frontmatter for metadata
// such as tags and syntax highlighting preferences.
//
// # Sheet Format
//
// Sheets are plain text files that may begin with YAML frontmatter:
//
//	---
//	syntax: bash
//	tags: [networking, linux, ssh]
//	---
//	# Connect to remote server
//	ssh user@hostname
//
//	# Copy files over SSH
//	scp local_file user@hostname:/remote/path
//
// The frontmatter is optional. If omitted, the sheet will use default values.
//
// # Core Types
//
// The Sheet type contains:
//   - Title: The sheet's name (derived from filename)
//   - Path: Full filesystem path to the sheet
//   - Text: The content of the sheet (without frontmatter)
//   - Tags: Categories assigned to the sheet
//   - Syntax: Language hint for syntax highlighting
//   - ReadOnly: Whether the sheet can be modified
//
// # Key Functions
//
//   - New: Creates a new Sheet from a file path
//   - Parse: Extracts frontmatter and content from sheet text
//   - Search: Searches sheet content using regular expressions
//   - Colorize: Applies syntax highlighting to sheet content
//
// # Syntax Highlighting
//
// The package integrates with the Chroma library to provide syntax highlighting.
// Supported languages include bash, python, go, javascript, and many others.
// The syntax can be specified in the frontmatter or auto-detected.
//
// # Example Usage
//
//	// Load a sheet from disk
//	s, err := sheet.New("/path/to/sheet", []string{"personal"}, false)
//	if err != nil {
//		log.Fatal(err)
//	}
//
//	// Search for content. Search takes a compiled *regexp.Regexp and
//	// returns the matching sections as a single string.
//	matches := s.Search(regexp.MustCompile("ssh"))
//
//	// Apply syntax highlighting. Colorize updates the sheet's Text in
//	// place and does not return a value.
//	s.Colorize(config)
package sheet

View File

@@ -0,0 +1,54 @@
package sheet
import (
"runtime"
"testing"
)
// TestParseWindowsLineEndings tests parsing with Windows line endings.
func TestParseWindowsLineEndings(t *testing.T) {
	// exercised only on Windows
	if runtime.GOOS != "windows" {
		t.Skip("Skipping Windows line ending test on non-Windows platform")
	}

	// cheatsheet content using CRLF line endings throughout
	markdown := "---\r\nsyntax: go\r\ntags: [ test ]\r\n---\r\nTo foo the bar: baz"

	// parse the frontmatter and body
	fm, text, err := parse(markdown)
	if err != nil {
		t.Errorf("failed to parse markdown: %v", err)
	}

	// the body must survive intact and the syntax field must be read
	if want := "To foo the bar: baz"; text != want {
		t.Errorf("failed to parse text: want: %s, got: %s", want, text)
	}
	if want := "go"; fm.Syntax != want {
		t.Errorf("failed to parse syntax: want: %s, got: %s", want, fm.Syntax)
	}
}
// TestParseInvalidYAML tests parsing with invalid YAML in frontmatter.
func TestParseInvalidYAML(t *testing.T) {
	// frontmatter containing an unclosed YAML bracket
	markdown := `---
syntax: go
tags: [ test
unclosed bracket
---
To foo the bar: baz`

	// parsing must surface an error rather than succeed silently
	if _, _, err := parse(markdown); err == nil {
		t.Error("expected error for invalid YAML, got nil")
	}
}

View File

@@ -0,0 +1,132 @@
package sheet
import (
"strings"
"testing"
)
// FuzzParse tests the parse function with fuzzing to uncover edge cases
// and potential panics in YAML frontmatter parsing.
//
// Invariants checked: parse never panics; on success the returned text is
// a suffix of the input; on error the input is returned unchanged as text.
func FuzzParse(f *testing.F) {
	// Add seed corpus with various valid and edge case inputs

	// Valid frontmatter
	f.Add("---\nsyntax: go\n---\nContent")
	f.Add("---\ntags: [a, b]\n---\n")
	f.Add("---\nsyntax: bash\ntags: [linux, shell]\n---\n#!/bin/bash\necho hello")

	// No frontmatter
	f.Add("No frontmatter here")
	f.Add("")
	f.Add("Just plain text\nwith multiple lines")

	// Edge cases with delimiters
	f.Add("---")
	f.Add("---\n")
	f.Add("---\n---")
	f.Add("---\n---\n")
	f.Add("---\n---\n---")
	f.Add("---\n---\n---\n---")
	f.Add("------\n------")

	// Invalid YAML
	f.Add("---\n{invalid yaml\n---\n")
	f.Add("---\nsyntax: \"unclosed quote\n---\n")
	f.Add("---\ntags: [a, b,\n---\n")

	// Windows line endings
	f.Add("---\r\nsyntax: go\r\n---\r\nContent")
	f.Add("---\r\n---\r\n")

	// Mixed line endings
	f.Add("---\nsyntax: go\r\n---\nContent")
	f.Add("---\r\nsyntax: go\n---\r\nContent")

	// Unicode and special characters
	f.Add("---\ntags: [emoji, 🎉]\n---\n")
	f.Add("---\nsyntax: 中文\n---\n")
	f.Add("---\ntags: [\x00, \x01]\n---\n")

	// Very long inputs
	f.Add("---\ntags: [" + strings.Repeat("a,", 1000) + "a]\n---\n")
	f.Add("---\n" + strings.Repeat("field: value\n", 1000) + "---\n")

	// Nested structures
	f.Add("---\ntags:\n - nested\n - list\n---\n")
	f.Add("---\nmeta:\n author: test\n version: 1.0\n---\n")

	f.Fuzz(func(t *testing.T, input string) {
		// The parse function should never panic, regardless of input
		func() {
			defer func() {
				if r := recover(); r != nil {
					t.Errorf("parse panicked with input %q: %v", input, r)
				}
			}()

			fm, text, err := parse(input)

			// Verify invariants
			if err == nil {
				// If parsing succeeded, validate the result

				// The returned text should be a suffix of the input
				// (either the whole input if no frontmatter, or the part after frontmatter)
				if !strings.HasSuffix(input, text) && text != input {
					t.Errorf("returned text %q is not a valid suffix of input %q", text, input)
				}

				// If input starts with delimiter and has valid frontmatter,
				// text should be shorter than input
				if strings.HasPrefix(input, "---\n") || strings.HasPrefix(input, "---\r\n") {
					if len(fm.Tags) > 0 || fm.Syntax != "" {
						// We successfully parsed frontmatter, so text should be shorter
						if len(text) >= len(input) {
							t.Errorf("text length %d should be less than input length %d when frontmatter is parsed",
								len(text), len(input))
						}
					}
				}

				// Note: Tags can be nil when frontmatter is not present or empty
				// This is expected behavior in Go for uninitialized slices
			} else {
				// If parsing failed, the original input should be returned as text
				if text != input {
					t.Errorf("on error, text should equal input: got %q, want %q", text, input)
				}
			}
		}()
	})
}
// FuzzParseDelimiterHandling specifically tests delimiter edge cases.
func FuzzParseDelimiterHandling(f *testing.F) {
	// seeds focusing on delimiter variations
	f.Add("---", "content")
	f.Add("", "---")
	f.Add("---", "---")
	f.Add("", "")

	f.Fuzz(func(t *testing.T, prefix string, suffix string) {
		// wrap the fuzzed fragments around several delimiter layouts
		candidates := []string{
			prefix + "---\n" + suffix,
			prefix + "---\r\n" + suffix,
			prefix + "---\n---\n" + suffix,
			prefix + "---\r\n---\r\n" + suffix,
			prefix + "---\n" + "yaml: data\n" + "---\n" + suffix,
		}

		// parse must never panic on any constructed variant
		for _, candidate := range candidates {
			func() {
				defer func() {
					if r := recover(); r != nil {
						t.Errorf("parse panicked with constructed input: %v", r)
					}
				}()
				_, _, _ = parse(candidate)
			}()
		}
	})
}

View File

@@ -9,16 +9,17 @@ import (
// Search returns the sections of the cheatsheet whose text matches reg.
// Sections are delimited by blank lines, and matching sections are
// rejoined with the same delimiter.
//
// NOTE: this block was diff residue with the pre- and post-change lines
// interleaved (both `matches := ""` and `var matches []string` were
// present, which cannot compile); it is resolved here to the intended
// post-change implementation using strings.Join.
func (s *Sheet) Search(reg *regexp.Regexp) string {
	// record matching sections
	var matches []string

	// search through the cheatsheet's text section by section
	for _, section := range strings.Split(s.Text, "\n\n") {
		// save matching sections
		if reg.MatchString(section) {
			matches = append(matches, section)
		}
	}

	// join matches with the same delimiter used for splitting
	return strings.Join(matches, "\n\n")
}

View File

@@ -0,0 +1,190 @@
package sheet
import (
"regexp"
"strings"
"testing"
"time"
)
// FuzzSearchRegex tests the regex compilation and search functionality
// to ensure it handles malformed patterns gracefully and doesn't suffer
// from catastrophic backtracking.
func FuzzSearchRegex(f *testing.F) {
	// Add seed corpus with various regex patterns

	// Valid patterns
	f.Add("test", "This is a test string")
	f.Add("(?i)test", "This is a TEST string")
	f.Add("foo|bar", "foo and bar")
	f.Add("^start", "start of line\nnext line")
	f.Add("end$", "at the end\nnext line")
	f.Add("\\d+", "123 numbers 456")
	f.Add("[a-z]+", "lowercase UPPERCASE")

	// Edge cases and potentially problematic patterns
	f.Add("", "empty pattern")
	f.Add(".", "any character")
	f.Add(".*", "match everything")
	f.Add(".+", "match something")
	f.Add("\\", "backslash")
	f.Add("(", "unclosed paren")
	f.Add(")", "unmatched paren")
	f.Add("[", "unclosed bracket")
	f.Add("]", "unmatched bracket")
	f.Add("[^]", "negated empty class")
	f.Add("(?", "incomplete group")

	// Patterns that might cause performance issues
	f.Add("(a+)+", "aaaaaaaaaaaaaaaaaaaaaaaab")
	f.Add("(a*)*", "aaaaaaaaaaaaaaaaaaaaaaaab")
	f.Add("(a|a)*", "aaaaaaaaaaaaaaaaaaaaaaaab")
	f.Add("(.*)*", "any text here")
	f.Add("(\\d+)+", "123456789012345678901234567890x")

	// Unicode patterns
	f.Add("☺", "Unicode ☺ smiley")
	f.Add("[一-龯]", "Chinese 中文 characters")
	f.Add("\\p{L}+", "Unicode letters")

	// Very long patterns
	f.Add(strings.Repeat("a", 1000), "long pattern")
	f.Add(strings.Repeat("(a|b)", 100), "complex pattern")

	f.Fuzz(func(t *testing.T, pattern string, text string) {
		// Test 1: Regex compilation should not panic
		var reg *regexp.Regexp
		var compileErr error
		func() {
			defer func() {
				if r := recover(); r != nil {
					t.Errorf("regexp.Compile panicked with pattern %q: %v", pattern, r)
				}
			}()
			reg, compileErr = regexp.Compile(pattern)
		}()

		// If compilation failed, that's OK - we're testing error handling
		if compileErr != nil {
			return
		}

		// Test 2: Create a sheet and test the Search method
		sheet := Sheet{
			Title: "test",
			Text:  text,
		}

		// run Search on its own goroutine so it can be timed out;
		// result is only read after <-done, which synchronizes with the
		// goroutine's write
		var result string
		done := make(chan bool, 1)
		go func() {
			defer func() {
				if r := recover(); r != nil {
					t.Errorf("Search panicked with pattern %q on text %q: %v", pattern, text, r)
				}
				done <- true
			}()
			result = sheet.Search(reg)
		}()

		// Timeout after 100ms to catch catastrophic backtracking
		select {
		case <-done:
			// Search completed successfully
		case <-time.After(100 * time.Millisecond):
			t.Errorf("Search timed out (possible catastrophic backtracking) with pattern %q on text %q", pattern, text)
			// BUGFIX: the goroutine may still be writing result; returning
			// here avoids a data race on the checks below
			return
		}

		// Test 3: Verify search result invariants
		if result != "" {
			// The Search function splits by "\n\n", so compare using the same logic
			resultLines := strings.Split(result, "\n\n")
			textLines := strings.Split(text, "\n\n")

			// Every result line should exist in the original text lines
			for _, rLine := range resultLines {
				found := false
				for _, tLine := range textLines {
					if rLine == tLine {
						found = true
						break
					}
				}
				if !found && rLine != "" {
					t.Errorf("Search result contains line not in original text: %q", rLine)
				}
			}
		}
	})
}
// FuzzSearchCatastrophicBacktracking specifically tests for regex patterns
// that could cause performance issues.
func FuzzSearchCatastrophicBacktracking(f *testing.F) {
	// Seed with patterns known to potentially cause issues
	f.Add("a", 10, 5)
	f.Add("x", 20, 3)

	f.Fuzz(func(t *testing.T, char string, repeats int, groups int) {
		// Limit the size to avoid memory issues in the test
		if repeats > 30 || repeats < 0 || groups > 10 || groups < 0 || len(char) > 5 {
			t.Skip("Skipping invalid or overly large test case")
		}

		// Construct patterns that might cause backtracking
		patterns := []string{
			strings.Repeat(char, repeats),
			"(" + char + "+)+",
			"(" + char + "*)*",
			"(" + char + "|" + char + ")*",
		}

		// Add nested groups
		if groups > 0 && groups < 10 {
			nested := char
			for i := 0; i < groups; i++ {
				nested = "(" + nested + ")+"
			}
			patterns = append(patterns, nested)
		}

		// Test text that might trigger backtracking
		testText := strings.Repeat(char, repeats) + "x"

		for _, pattern := range patterns {
			// BUGFIX: shadow the loop variable so the goroutine below sees
			// a stable copy. Under pre-Go-1.22 loop semantics the timeout
			// branch lets the loop advance while the goroutine still runs,
			// which could make it report the wrong pattern.
			pattern := pattern

			// Try to compile the pattern; invalid patterns are skipped
			reg, err := regexp.Compile(pattern)
			if err != nil {
				continue
			}

			// Run the search on its own goroutine so it can be timed out
			done := make(chan bool, 1)
			go func() {
				defer func() {
					if r := recover(); r != nil {
						t.Errorf("Search panicked with backtracking pattern %q: %v", pattern, r)
					}
					done <- true
				}()
				sheet := Sheet{Text: testText}
				_ = sheet.Search(reg)
			}()

			select {
			case <-done:
				// Completed successfully
			case <-time.After(50 * time.Millisecond):
				t.Logf("Warning: potential backtracking issue with pattern %q (completed slowly)", pattern)
			}
		}
	})
}

View File

@@ -0,0 +1,94 @@
package sheet
import (
"strings"
"testing"
)
// FuzzTagged tests the Tagged function with potentially malicious tag inputs.
//
// Threat model: An attacker crafts a malicious cheatsheet with specially
// crafted tags that could cause issues when a user searches/filters by tags.
// This is particularly relevant for shared community cheatsheets.
//
// Invariants checked: Tagged never panics, behaves as an exact (case-
// sensitive) string membership test over the sheet's Tags, and tolerates
// duplicate tags.
func FuzzTagged(f *testing.F) {
	// Add seed corpus with potentially problematic inputs
	// These represent tags an attacker might use in a malicious cheatsheet
	f.Add("normal", "normal")
	f.Add("", "")
	f.Add(" ", " ")
	f.Add("\n", "\n")
	f.Add("\r\n", "\r\n")
	f.Add("\x00", "\x00")                         // Null byte
	f.Add("../../etc/passwd", "../../etc/passwd") // Path traversal attempt
	f.Add("'; DROP TABLE sheets;--", "sql")       // SQL injection attempt
	f.Add("<script>alert('xss')</script>", "xss") // XSS attempt
	f.Add("${HOME}", "${HOME}")                   // Environment variable
	f.Add("$(whoami)", "$(whoami)")               // Command substitution
	f.Add("`date`", "`date`")                     // Command substitution
	f.Add("\\x41\\x42", "\\x41\\x42")             // Escape sequences
	f.Add("%00", "%00")                           // URL encoded null
	f.Add("tag\nwith\nnewlines", "tag")
	f.Add(strings.Repeat("a", 10000), "a") // Very long tag
	f.Add("🎉", "🎉")                       // Unicode
	f.Add("\U0001F4A9", "\U0001F4A9")      // Unicode poop emoji
	f.Add("tag with spaces", "tag with spaces")
	f.Add("TAG", "tag") // Case sensitivity check
	f.Add("tag", "TAG") // Case sensitivity check

	f.Fuzz(func(t *testing.T, sheetTag string, searchTag string) {
		// Create a sheet with the potentially malicious tag
		sheet := Sheet{
			Title: "test",
			Tags:  []string{sheetTag},
		}

		// The Tagged function should never panic regardless of input
		func() {
			defer func() {
				if r := recover(); r != nil {
					t.Errorf("Tagged panicked with sheetTag=%q, searchTag=%q: %v",
						sheetTag, searchTag, r)
				}
			}()

			result := sheet.Tagged(searchTag)

			// Verify the result is consistent with a simple string comparison
			expected := false
			for _, tag := range sheet.Tags {
				if tag == searchTag {
					expected = true
					break
				}
			}
			if result != expected {
				t.Errorf("Tagged returned %v but expected %v for sheetTag=%q, searchTag=%q",
					result, expected, sheetTag, searchTag)
			}

			// Additional invariant: Tagged should be case-sensitive
			if sheetTag != searchTag && result {
				t.Errorf("Tagged matched different strings: sheetTag=%q, searchTag=%q",
					sheetTag, searchTag)
			}
		}()

		// Test with multiple tags including the fuzzed one
		sheetMulti := Sheet{
			Title: "test",
			Tags:  []string{"safe1", sheetTag, "safe2", sheetTag}, // Duplicate tags
		}
		func() {
			defer func() {
				if r := recover(); r != nil {
					t.Errorf("Tagged panicked with multiple tags including %q: %v",
						sheetTag, r)
				}
			}()
			_ = sheetMulti.Tagged(searchTag)
		}()
	})
}

View File

@@ -0,0 +1,4 @@
go test fuzz v1
string("0")
int(-6)
int(5)

View File

@@ -0,0 +1,3 @@
go test fuzz v1
string(".")
string(" 0000\n\n\n\n00000")