refactor: apply go fix modernizers for Go 1.26

Automated fixes: interface{} → any, range-over-int, t.Context(),
wg.Go(), strings.SplitSeq, strings.Builder, slices.Contains,
maps helpers, min/max builtins.

Co-Authored-By: Virgil <virgil@lethean.io>
This commit is contained in:
Snider 2026-02-22 21:00:17 +00:00
parent ff2fc39a3d
commit f2a8f7be71
3 changed files with 32 additions and 41 deletions

View file

@ -183,7 +183,7 @@ func BenchmarkSearch(b *testing.B) {
index: newSearchIndex(),
}
for i := 0; i < 150; i++ {
for i := range 150 {
c.Add(&Topic{
ID: fmt.Sprintf("topic-%d", i),
Title: fmt.Sprintf("Topic Number %d About Various Subjects", i),

View file

@ -76,22 +76,22 @@ func extractSeeAlso(text string) ([]string, string) {
var related []string
var cleanedLines []string
lines := strings.Split(text, "\n")
for _, line := range lines {
lines := strings.SplitSeq(text, "\n")
for line := range lines {
trimmed := strings.TrimSpace(line)
lower := strings.ToLower(trimmed)
if strings.HasPrefix(lower, "see also:") {
if after, ok := strings.CutPrefix(lower, "see also:"); ok {
// Extract references after "See also:"
rest := strings.TrimPrefix(lower, "see also:")
rest := after
// The original casing version
restOrig := trimmed[len("See also:"):]
if len(restOrig) == 0 {
restOrig = rest
}
refs := strings.Split(restOrig, ",")
for _, ref := range refs {
refs := strings.SplitSeq(restOrig, ",")
for ref := range refs {
ref = strings.TrimSpace(ref)
if ref != "" {
related = append(related, GenerateID(ref))

View file

@ -2,6 +2,7 @@ package help
import (
"regexp"
"slices"
"sort"
"strings"
"unicode"
@ -9,16 +10,16 @@ import (
// Scoring weights for search result ranking.
//
// Word-level scores (scoreExactWord .. scoreStemWord) rank how a query
// word matched the index; boost scores (scoreTitleBoost .. scoreAllWords)
// are added on top when the match appears in a high-signal location.
const (
	scoreExactWord    = 1.0  // Exact word match in the index
	scorePrefixWord   = 0.5  // Prefix/partial word match
	scoreFuzzyWord    = 0.3  // Fuzzy (Levenshtein) match
	scoreStemWord     = 0.7  // Stemmed word match (between exact and prefix)
	scoreTitleBoost   = 10.0 // Query word appears in topic title
	scoreSectionBoost = 5.0  // Query word appears in section title
	scoreTagBoost     = 3.0  // Query word appears in topic tags
	scorePhraseBoost  = 8.0  // Exact phrase match in content
	scoreAllWords     = 2.0  // All query words present (multi-word bonus)
	fuzzyMaxDistance  = 2    // Maximum edit distance for fuzzy matching
)
// SearchResult represents a search match.
@ -78,10 +79,8 @@ func (i *searchIndex) Add(topic *Topic) {
// addToIndex adds a word-to-topic mapping.
func (i *searchIndex) addToIndex(word, topicID string) {
// Avoid duplicates
for _, id := range i.index[word] {
if id == topicID {
return
}
if slices.Contains(i.index[word], topicID) {
return
}
i.index[word] = append(i.index[word], topicID)
}
@ -173,11 +172,12 @@ func (i *searchIndex) Search(query string) []*SearchResult {
for _, phrase := range phrases {
phraseLower := strings.ToLower(phrase)
for topicID, topic := range i.topics {
text := strings.ToLower(topic.Title + " " + topic.Content)
var text strings.Builder
text.WriteString(strings.ToLower(topic.Title + " " + topic.Content))
for _, section := range topic.Sections {
text += " " + strings.ToLower(section.Title+" "+section.Content)
text.WriteString(" " + strings.ToLower(section.Title+" "+section.Content))
}
if strings.Contains(text, phraseLower) {
if strings.Contains(text.String(), phraseLower) {
scores[topicID] += scorePhraseBoost
}
}
@ -314,9 +314,9 @@ func levenshtein(a, b string) int {
cost = 0
}
curr[j] = min3(
prev[j]+1, // deletion
curr[j-1]+1, // insertion
prev[j-1]+cost, // substitution
prev[j]+1, // deletion
curr[j-1]+1, // insertion
prev[j-1]+cost, // substitution
)
}
prev, curr = curr, prev
@ -434,8 +434,8 @@ func extractSnippet(content string, res []*regexp.Regexp) string {
// If no regexes, return start of content without highlighting
if len(res) == 0 {
lines := strings.Split(content, "\n")
for _, line := range lines {
lines := strings.SplitSeq(content, "\n")
for line := range lines {
line = strings.TrimSpace(line)
if line != "" && !strings.HasPrefix(line, "#") {
runes := []rune(line)
@ -465,24 +465,15 @@ func extractSnippet(content string, res []*regexp.Regexp) string {
if matchPos == -1 {
// No match found, use start of content
start = 0
end = snippetLen
if end > runeLen {
end = runeLen
}
end = min(snippetLen, runeLen)
} else {
// Convert byte position to rune position
matchRunePos := len([]rune(content[:matchPos]))
// Extract snippet around match (rune-based)
start = matchRunePos - 50
if start < 0 {
start = 0
}
start = max(matchRunePos-50, 0)
end = start + snippetLen
if end > runeLen {
end = runeLen
}
end = min(start+snippetLen, runeLen)
}
snippet := string(runes[start:end])