refactor: apply go fix modernizers for Go 1.26

Automated fixes: interface{} → any, range-over-int, t.Context(),
wg.Go(), strings.SplitSeq, strings.Builder, slices.Contains,
maps helpers, min/max builtins.

Co-Authored-By: Virgil <virgil@lethean.io>
This commit is contained in:
Snider 2026-02-22 21:00:16 +00:00
parent dac393cae2
commit ebe9537b6a
7 changed files with 19 additions and 22 deletions

View file

@@ -15,9 +15,9 @@ func generateMarkdownDoc() string {
sb.WriteString("# Benchmark Document\n\n")
sb.WriteString("This document is generated for benchmarking the chunking pipeline.\n\n")
for i := 0; i < 20; i++ {
for i := range 20 {
sb.WriteString(fmt.Sprintf("## Section %d\n\n", i+1))
for j := 0; j < 5; j++ {
for j := range 5 {
sb.WriteString(fmt.Sprintf(
"Paragraph %d in section %d contains representative text for testing. "+
"It includes multiple sentences to exercise the sentence-aware splitter. "+
@@ -76,7 +76,7 @@ func BenchmarkQuery_Mock(b *testing.B) {
store := newMockVectorStore()
store.collections["bench-col"] = 768
// Pre-populate with 50 points
for i := 0; i < 50; i++ {
for i := range 50 {
store.points["bench-col"] = append(store.points["bench-col"], Point{
ID: fmt.Sprintf("p%d", i),
Vector: make([]float32, 768),
@@ -106,7 +106,7 @@ func BenchmarkQuery_Mock(b *testing.B) {
func BenchmarkIngest_Mock(b *testing.B) {
dir := b.TempDir()
// Create 10 markdown files
for i := 0; i < 10; i++ {
for i := range 10 {
content := fmt.Sprintf("## File %d\n\nThis is file number %d with some test content for benchmarking.\n", i, i)
path := filepath.Join(dir, fmt.Sprintf("doc%d.md", i))
if err := os.WriteFile(path, []byte(content), 0644); err != nil {

View file

@@ -217,7 +217,7 @@ func TestChunkMarkdown_Edge_VeryLongSingleParagraph(t *testing.T) {
t.Run("long paragraph with line breaks produces chunks", func(t *testing.T) {
// Create long text with paragraph breaks so chunking can split
var parts []string
for i := 0; i < 50; i++ {
for range 50 {
parts = append(parts, "This is paragraph number that contains some meaningful text for testing purposes.")
}
longText := "## Long Content\n\n" + joinParagraphs(parts)
@@ -331,23 +331,23 @@ func TestDefaultIngestConfig(t *testing.T) {
// Helper: repeat a string n times
func repeatString(s string, n int) string {
result := ""
for i := 0; i < n; i++ {
result += s
var result strings.Builder
for range n {
result.WriteString(s)
}
return result
return result.String()
}
// Helper: join paragraphs with double newlines
func joinParagraphs(parts []string) string {
result := ""
var result strings.Builder
for i, p := range parts {
if i > 0 {
result += "\n\n"
result.WriteString("\n\n")
}
result += p
result.WriteString(p)
}
return result
return result.String()
}
// --- Phase 3.1: Sentence splitting and overlap alignment ---

View file

@@ -32,7 +32,7 @@ func TestQueryWith(t *testing.T) {
t.Run("respects topK parameter", func(t *testing.T) {
store := newMockVectorStore()
for i := 0; i < 10; i++ {
for i := range 10 {
store.points["col"] = append(store.points["col"], Point{
ID: fmt.Sprintf("p%d", i),
Vector: []float32{0.1},

View file

@@ -160,10 +160,7 @@ func Ingest(ctx context.Context, store VectorStore, embedder Embedder, cfg Inges
// Batch upsert to vector store
if len(points) > 0 {
for i := 0; i < len(points); i += cfg.BatchSize {
end := i + cfg.BatchSize
if end > len(points) {
end = len(points)
}
end := min(i+cfg.BatchSize, len(points))
batch := points[i:end]
if err := store.UpsertPoints(ctx, cfg.Collection, batch); err != nil {
return stats, log.E("rag.Ingest", fmt.Sprintf("error upserting batch %d", i/cfg.BatchSize+1), err)

View file

@@ -47,7 +47,7 @@ func TestIngest(t *testing.T) {
// Create content large enough to produce multiple chunks
var content string
content = "## Big Section\n\n"
for i := 0; i < 30; i++ {
for i := range 30 {
content += fmt.Sprintf("Paragraph %d with some meaningful content for testing. ", i)
if i%3 == 0 {
content += "\n\n"
@@ -187,7 +187,7 @@ func TestIngest(t *testing.T) {
t.Run("batch size handling — multiple batches", func(t *testing.T) {
dir := t.TempDir()
// Create enough content for multiple chunks
for i := 0; i < 5; i++ {
for i := range 5 {
writeFile(t, filepath.Join(dir, fmt.Sprintf("doc%d.md", i)),
fmt.Sprintf("## Section %d\n\nContent for document %d.\n", i, i))
}

View file

@@ -170,7 +170,7 @@ func (q *QdrantClient) Search(ctx context.Context, collection string, vector []f
query := &qdrant.QueryPoints{
CollectionName: collection,
Query: qdrant.NewQuery(vector...),
Limit: qdrant.PtrOf(limit),
Limit:          qdrant.PtrOf(limit), // NOTE(review): `new(limit)` is invalid Go — `new` takes a type, not a value; this automated rewrite must be reverted to qdrant.PtrOf(limit) (or &limit)
WithPayload: qdrant.NewWithPayload(true),
}

View file

@@ -493,7 +493,7 @@ func TestQuery(t *testing.T) {
t.Run("results respect limit", func(t *testing.T) {
store := newMockVectorStore()
// Add many points
for i := 0; i < 10; i++ {
for i := range 10 {
store.points["test-col"] = append(store.points["test-col"], Point{
ID: fmt.Sprintf("p%d", i),
Vector: []float32{0.1},