feat: implement lint --fix and standardize README
Add FixFile() to rewrite README entries: capitalize descriptions, add trailing periods, remove author attributions, and sort entries alphabetically within each section. Update parser regex to handle entries with markers between URL and description separator. Fix linter to check first letter (not first character) for capitalization. ~480 entries standardized across the README. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
144
internal/linter/fixer.go
Normal file
144
internal/linter/fixer.go
Normal file
@@ -0,0 +1,144 @@
|
||||
package linter
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/veggiemonk/awesome-docker/internal/parser"
|
||||
)
|
||||
|
||||
// Patterns recognizing trailing author credits at the end of a description.
//
// attributionRe covers linked forms — "by [@author](url)" and
// "by [@author][ref]" — optionally prefixed with "Created", "Maintained",
// or "Built" (any initial case), with an optional trailing period.
//
// bareAttributionRe covers the unlinked form: "by @author".
var (
	attributionRe     = regexp.MustCompile(`\s+(?:(?:[Cc]reated|[Mm]aintained|[Bb]uilt)\s+)?by\s+\[@[^\]]+\](?:\([^)]*\)|\[[^\]]*\])\.?$`)
	bareAttributionRe = regexp.MustCompile(`\s+by\s+@\w+\.?$`)
)

// RemoveAttribution strips a trailing author credit (linked or bare) from
// desc and trims any whitespace left at either end.
func RemoveAttribution(desc string) string {
	// Linked form first, then bare form — mirrors how entries are written.
	for _, re := range []*regexp.Regexp{attributionRe, bareAttributionRe} {
		desc = re.ReplaceAllString(desc, "")
	}
	return strings.TrimSpace(desc)
}
|
||||
|
||||
// FormatEntry reconstructs a markdown list line from a parsed Entry.
|
||||
func FormatEntry(e parser.Entry) string {
|
||||
desc := e.Description
|
||||
var markers []string
|
||||
for _, m := range e.Markers {
|
||||
switch m {
|
||||
case parser.MarkerAbandoned:
|
||||
markers = append(markers, ":skull:")
|
||||
case parser.MarkerPaid:
|
||||
markers = append(markers, ":heavy_dollar_sign:")
|
||||
case parser.MarkerWIP:
|
||||
markers = append(markers, ":construction:")
|
||||
}
|
||||
}
|
||||
if len(markers) > 0 {
|
||||
desc = strings.Join(markers, " ") + " " + desc
|
||||
}
|
||||
return fmt.Sprintf("- [%s](%s) - %s", e.Name, e.URL, desc)
|
||||
}
|
||||
|
||||
// entryGroup tracks a consecutive run of entry lines in the file, so the
// run can be fixed and sorted, then written back over its original span.
type entryGroup struct {
	startIdx int // index into the file's lines slice where this run begins
	entries []parser.Entry // parsed entries, in original file order
}
|
||||
|
||||
// FixFile reads the README, fixes entries (capitalize, period, remove attribution,
|
||||
// sort), and writes the result back.
|
||||
func FixFile(path string) (int, error) {
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
var lines []string
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
lines = append(lines, scanner.Text())
|
||||
}
|
||||
if err := scanner.Err(); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Identify entry groups (consecutive parsed entry lines)
|
||||
var groups []entryGroup
|
||||
var current *entryGroup
|
||||
fixCount := 0
|
||||
|
||||
for i, line := range lines {
|
||||
entry, err := parser.ParseEntry(line, i+1)
|
||||
if err != nil {
|
||||
// Not an entry — close any active group
|
||||
if current != nil {
|
||||
groups = append(groups, *current)
|
||||
current = nil
|
||||
}
|
||||
continue
|
||||
}
|
||||
if current == nil {
|
||||
current = &entryGroup{startIdx: i}
|
||||
}
|
||||
current.entries = append(current.entries, entry)
|
||||
}
|
||||
if current != nil {
|
||||
groups = append(groups, *current)
|
||||
}
|
||||
|
||||
// Process each group: fix entries, sort, replace lines
|
||||
for _, g := range groups {
|
||||
var fixed []parser.Entry
|
||||
for _, e := range g.entries {
|
||||
f := FixEntry(e)
|
||||
f.Description = RemoveAttribution(f.Description)
|
||||
// Re-apply period after removing attribution (it may have been stripped)
|
||||
if len(f.Description) > 0 && !strings.HasSuffix(f.Description, ".") {
|
||||
f.Description += "."
|
||||
}
|
||||
fixed = append(fixed, f)
|
||||
}
|
||||
|
||||
sorted := SortEntries(fixed)
|
||||
|
||||
for j, e := range sorted {
|
||||
newLine := FormatEntry(e)
|
||||
idx := g.startIdx + j
|
||||
if lines[idx] != newLine {
|
||||
fixCount++
|
||||
lines[idx] = newLine
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if fixCount == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
// Write back
|
||||
out, err := os.Create(path)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
w := bufio.NewWriter(out)
|
||||
for i, line := range lines {
|
||||
w.WriteString(line)
|
||||
if i < len(lines)-1 {
|
||||
w.WriteString("\n")
|
||||
}
|
||||
}
|
||||
// Preserve trailing newline if original had one
|
||||
w.WriteString("\n")
|
||||
return fixCount, w.Flush()
|
||||
}
|
||||
140
internal/linter/fixer_test.go
Normal file
140
internal/linter/fixer_test.go
Normal file
@@ -0,0 +1,140 @@
|
||||
package linter
|
||||
|
||||
import (
	"os"
	"path/filepath"
	"strings"
	"testing"

	"github.com/veggiemonk/awesome-docker/internal/parser"
)
|
||||
|
||||
func TestRemoveAttribution(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
want string
|
||||
}{
|
||||
{
|
||||
"Tool for managing containers by [@author](https://github.com/author)",
|
||||
"Tool for managing containers",
|
||||
},
|
||||
{
|
||||
"Tool for managing containers by [@author][author]",
|
||||
"Tool for managing containers",
|
||||
},
|
||||
{
|
||||
"Tool for managing containers by @author",
|
||||
"Tool for managing containers",
|
||||
},
|
||||
{
|
||||
"Analyzes resource usage. Created by [@Google][google]",
|
||||
"Analyzes resource usage.",
|
||||
},
|
||||
{
|
||||
"A tool by [@someone](https://example.com).",
|
||||
"A tool",
|
||||
},
|
||||
{
|
||||
"step-by-step tutorial and more resources",
|
||||
"step-by-step tutorial and more resources",
|
||||
},
|
||||
{
|
||||
"No attribution here",
|
||||
"No attribution here",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
got := RemoveAttribution(tt.input)
|
||||
if got != tt.want {
|
||||
t.Errorf("RemoveAttribution(%q) = %q, want %q", tt.input, got, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatEntry(t *testing.T) {
|
||||
e := parser.Entry{
|
||||
Name: "Portainer",
|
||||
URL: "https://github.com/portainer/portainer",
|
||||
Description: "Management UI for Docker.",
|
||||
}
|
||||
got := FormatEntry(e)
|
||||
want := "- [Portainer](https://github.com/portainer/portainer) - Management UI for Docker."
|
||||
if got != want {
|
||||
t.Errorf("FormatEntry = %q, want %q", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatEntryWithMarkers(t *testing.T) {
|
||||
e := parser.Entry{
|
||||
Name: "OldTool",
|
||||
URL: "https://github.com/old/tool",
|
||||
Description: "A deprecated tool.",
|
||||
Markers: []parser.Marker{parser.MarkerAbandoned},
|
||||
}
|
||||
got := FormatEntry(e)
|
||||
want := "- [OldTool](https://github.com/old/tool) - :skull: A deprecated tool."
|
||||
if got != want {
|
||||
t.Errorf("FormatEntry = %q, want %q", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFixFile(t *testing.T) {
|
||||
content := `# Awesome Docker
|
||||
|
||||
## Tools
|
||||
|
||||
- [Zebra](https://example.com/zebra) - a tool by [@author](https://github.com/author)
|
||||
- [Alpha](https://example.com/alpha) - another tool
|
||||
|
||||
## Other
|
||||
|
||||
Some text here.
|
||||
`
|
||||
tmp, err := os.CreateTemp("", "readme-*.md")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer os.Remove(tmp.Name())
|
||||
|
||||
if _, err := tmp.WriteString(content); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
tmp.Close()
|
||||
|
||||
count, err := FixFile(tmp.Name())
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if count == 0 {
|
||||
t.Fatal("expected fixes, got 0")
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(tmp.Name())
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
result := string(data)
|
||||
|
||||
// Check sorting: Alpha should come before Zebra
|
||||
alphaIdx := strings.Index(result, "[Alpha]")
|
||||
zebraIdx := strings.Index(result, "[Zebra]")
|
||||
if alphaIdx > zebraIdx {
|
||||
t.Error("expected Alpha before Zebra after sort")
|
||||
}
|
||||
|
||||
// Check capitalization
|
||||
if !strings.Contains(result, "- A tool.") {
|
||||
t.Errorf("expected capitalized description, got:\n%s", result)
|
||||
}
|
||||
|
||||
// Check attribution removed
|
||||
if strings.Contains(result, "@author") {
|
||||
t.Errorf("expected attribution removed, got:\n%s", result)
|
||||
}
|
||||
|
||||
// Check period added
|
||||
if !strings.Contains(result, "Another tool.") {
|
||||
t.Errorf("expected period added, got:\n%s", result)
|
||||
}
|
||||
}
|
||||
@@ -47,7 +47,7 @@ func (i Issue) String() string {
|
||||
func CheckEntry(e parser.Entry) []Issue {
|
||||
var issues []Issue
|
||||
|
||||
if len(e.Description) > 0 && !unicode.IsUpper(rune(e.Description[0])) {
|
||||
if first, ok := firstLetter(e.Description); ok && !unicode.IsUpper(first) {
|
||||
issues = append(issues, Issue{
|
||||
Rule: RuleDescriptionCapital,
|
||||
Severity: SeverityError,
|
||||
@@ -106,13 +106,28 @@ func CheckDuplicates(entries []parser.Entry) []Issue {
|
||||
return issues
|
||||
}
|
||||
|
||||
// firstLetter scans s rune by rune and reports the first Unicode letter it
// finds. The boolean result is false when s contains no letters at all, in
// which case the rune result is zero.
func firstLetter(s string) (rune, bool) {
	for _, r := range s {
		if !unicode.IsLetter(r) {
			continue
		}
		return r, true
	}
	return 0, false
}
|
||||
|
||||
// FixEntry returns a copy of the entry with auto-fixable issues corrected.
|
||||
func FixEntry(e parser.Entry) parser.Entry {
|
||||
fixed := e
|
||||
if len(fixed.Description) > 0 {
|
||||
// Capitalize first letter
|
||||
// Capitalize first letter (find it, may not be at index 0)
|
||||
runes := []rune(fixed.Description)
|
||||
runes[0] = unicode.ToUpper(runes[0])
|
||||
for i, r := range runes {
|
||||
if unicode.IsLetter(r) {
|
||||
runes[i] = unicode.ToUpper(r)
|
||||
break
|
||||
}
|
||||
}
|
||||
fixed.Description = string(runes)
|
||||
|
||||
// Ensure period at end
|
||||
|
||||
@@ -9,7 +9,11 @@ import (
|
||||
)
|
||||
|
||||
// entryRe matches: - [Name](URL) - Description
|
||||
var entryRe = regexp.MustCompile(`^[-*]\s+\[([^\]]+)\]\(([^)]+)\)\s+-\s+(.+)$`)
|
||||
// Also handles optional markers/text between URL and " - " separator, e.g.:
|
||||
//
|
||||
// - [Name](URL) :skull: - Description
|
||||
// - [Name](URL) (2) :skull: - Description
|
||||
var entryRe = regexp.MustCompile(`^[-*]\s+\[([^\]]+)\]\(([^)]+)\)(.*?)\s+-\s+(.+)$`)
|
||||
|
||||
// headingRe matches markdown headings: # Title, ## Title, etc.
|
||||
var headingRe = regexp.MustCompile(`^(#{1,6})\s+(.+?)(?:\s*<!--.*-->)?$`)
|
||||
@@ -27,12 +31,15 @@ func ParseEntry(line string, lineNum int) (Entry, error) {
|
||||
return Entry{}, fmt.Errorf("line %d: not a valid entry: %q", lineNum, line)
|
||||
}
|
||||
|
||||
desc := m[3]
|
||||
middle := m[3] // text between URL closing paren and " - "
|
||||
desc := m[4]
|
||||
var markers []Marker
|
||||
|
||||
// Extract markers from both the middle section and the description
|
||||
for text, marker := range markerMap {
|
||||
if strings.Contains(desc, text) {
|
||||
if strings.Contains(middle, text) || strings.Contains(desc, text) {
|
||||
markers = append(markers, marker)
|
||||
middle = strings.ReplaceAll(middle, text, "")
|
||||
desc = strings.ReplaceAll(desc, text, "")
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user