feat: upgrade to core v0.8.0-alpha.1, replace banned stdlib imports
All checks were successful
Security Scan / security (push) Successful in 8s
Test / test (push) Successful in 49s

Replace fmt, errors, strings, encoding/json with Core primitives
across 20 files. Keep strings.Fields/CutPrefix. No translation
files modified.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Claude 2026-03-26 14:11:15 +00:00 committed by Virgil
parent 7e602ed48a
commit c4ef1e6205
22 changed files with 225 additions and 205 deletions

View file

@ -2,9 +2,9 @@ package i18n
import (
"context"
"fmt"
"time"
"dappco.re/go/core"
log "dappco.re/go/core/log"
"forge.lthn.ai/core/go-inference"
)
@ -66,7 +66,7 @@ func CalibrateDomains(ctx context.Context, modelA, modelB inference.TextModel,
// Build classification prompts from sample texts.
prompts := make([]string, len(samples))
for i, s := range samples {
prompts[i] = fmt.Sprintf(cfg.promptTemplate, s.Text)
prompts[i] = core.Sprintf(cfg.promptTemplate, s.Text)
}
// Classify with model A.
@ -93,7 +93,7 @@ func CalibrateDomains(ctx context.Context, modelA, modelB inference.TextModel,
if agree {
stats.Agreed++
} else {
key := fmt.Sprintf("%s->%s", a, b)
key := core.Sprintf("%s->%s", a, b)
stats.ConfusionPairs[key]++
}
stats.ByDomainA[a]++
@ -140,7 +140,7 @@ func classifyAll(ctx context.Context, model inference.TextModel, prompts []strin
results, err := model.Classify(ctx, batch, inference.WithMaxTokens(1))
if err != nil {
return nil, 0, log.E("classifyAll", fmt.Sprintf("classify batch [%d:%d]", i, end), err)
return nil, 0, log.E("classifyAll", core.Sprintf("classify batch [%d:%d]", i, end), err)
}
for j, r := range results {

View file

@ -3,12 +3,10 @@ package i18n
import (
"bufio"
"context"
"encoding/json"
"fmt"
"io"
"strings"
"time"
"dappco.re/go/core"
log "dappco.re/go/core/log"
"forge.lthn.ai/core/go-inference"
)
@ -63,7 +61,7 @@ func mapTokenToDomain(token string) string {
if len(token) == 0 {
return "unknown"
}
lower := strings.ToLower(token)
lower := core.Lower(token)
switch {
case lower == "technical" || lower == "tech":
return "technical"
@ -107,7 +105,7 @@ func ClassifyCorpus(ctx context.Context, model inference.TextModel,
}
prompts := make([]string, len(batch))
for i, p := range batch {
prompts[i] = fmt.Sprintf(cfg.promptTemplate, p.prompt)
prompts[i] = core.Sprintf(cfg.promptTemplate, p.prompt)
}
results, err := model.Classify(ctx, prompts, inference.WithMaxTokens(1))
if err != nil {
@ -119,13 +117,12 @@ func ClassifyCorpus(ctx context.Context, model inference.TextModel,
stats.ByDomain[domain]++
stats.Total++
line, err := json.Marshal(batch[i].record)
if err != nil {
return log.E("ClassifyCorpus", "marshal output", err)
}
if _, err := fmt.Fprintf(output, "%s\n", line); err != nil {
return log.E("ClassifyCorpus", "write output", err)
mr := core.JSONMarshal(batch[i].record)
if !mr.OK {
return log.E("ClassifyCorpus", "marshal output", mr.Value.(error))
}
line := mr.Value.([]byte)
core.Print(output, "%s", line)
}
batch = batch[:0]
return nil
@ -133,7 +130,7 @@ func ClassifyCorpus(ctx context.Context, model inference.TextModel,
for scanner.Scan() {
var record map[string]any
if err := json.Unmarshal(scanner.Bytes(), &record); err != nil {
if r := core.JSONUnmarshal(scanner.Bytes(), &record); !r.OK {
stats.Skipped++
continue
}

View file

@ -3,11 +3,10 @@ package i18n
import (
"bytes"
"context"
"encoding/json"
"iter"
"strings"
"testing"
"dappco.re/go/core"
"forge.lthn.ai/core/go-inference"
)
@ -85,7 +84,7 @@ func TestClassifyCorpus_Basic(t *testing.T) {
},
}
input := strings.NewReader(
input := core.NewReader(
`{"seed_id":"1","domain":"general","prompt":"Delete the file"}` + "\n" +
`{"seed_id":"2","domain":"science","prompt":"Explain gravity"}` + "\n",
)
@ -102,15 +101,15 @@ func TestClassifyCorpus_Basic(t *testing.T) {
t.Errorf("Skipped = %d, want 0", stats.Skipped)
}
lines := strings.Split(strings.TrimSpace(output.String()), "\n")
lines := core.Split(core.Trim(output.String()), "\n")
if len(lines) != 2 {
t.Fatalf("output lines = %d, want 2", len(lines))
}
for i, line := range lines {
var record map[string]any
if err := json.Unmarshal([]byte(line), &record); err != nil {
t.Fatalf("line %d: unmarshal: %v", i, err)
if r := core.JSONUnmarshal([]byte(line), &record); !r.OK {
t.Fatalf("line %d: unmarshal: %v", i, r.Value)
}
if record["domain_1b"] != "technical" {
t.Errorf("line %d: domain_1b = %v, want %q", i, record["domain_1b"], "technical")
@ -133,7 +132,7 @@ func TestClassifyCorpus_SkipsMalformed(t *testing.T) {
},
}
input := strings.NewReader(
input := core.NewReader(
"not valid json\n" +
`{"seed_id":"1","domain":"general","prompt":"Hello world"}` + "\n" +
`{"seed_id":"2","domain":"general"}` + "\n",
@ -157,7 +156,7 @@ func TestClassifyCorpus_DomainMapping(t *testing.T) {
classifyFunc: func(_ context.Context, prompts []string, _ ...inference.GenerateOption) ([]inference.ClassifyResult, error) {
results := make([]inference.ClassifyResult, len(prompts))
for i, p := range prompts {
if strings.Contains(p, "Delete") {
if core.Contains(p, "Delete") {
results[i] = inference.ClassifyResult{Token: inference.Token{Text: "technical"}}
} else {
results[i] = inference.ClassifyResult{Token: inference.Token{Text: "ethical"}}
@ -167,7 +166,7 @@ func TestClassifyCorpus_DomainMapping(t *testing.T) {
},
}
input := strings.NewReader(
input := core.NewReader(
`{"prompt":"Delete the file now"}` + "\n" +
`{"prompt":"Is it right to lie?"}` + "\n",
)

View file

@ -1,9 +1,9 @@
package i18n
import (
"fmt"
"testing"
"dappco.re/go/core"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -123,7 +123,7 @@ func TestSubject_String_Good(t *testing.T) {
func TestSubject_String_Good_Stringer(t *testing.T) {
// Use a type that implements fmt.Stringer
subj := S("error", fmt.Errorf("something broke"))
subj := S("error", core.NewError("something broke"))
assert.Equal(t, "something broke", subj.String())
}

View file

@ -4,7 +4,6 @@ package i18n
import (
"context"
"fmt"
"io/fs"
"sync"
@ -59,7 +58,7 @@ func NewCoreService(opts ServiceOptions) func(*core.Core) (any, error) {
if opts.Language != "" {
if langErr := svc.SetLanguage(opts.Language); langErr != nil {
return nil, fmt.Errorf("i18n: invalid language %q: %w", opts.Language, langErr)
return nil, core.Wrap(langErr, "NewCoreService", core.Sprintf("i18n: invalid language %q", opts.Language))
}
}

7
go.mod
View file

@ -5,15 +5,12 @@ go 1.26.0
require golang.org/x/text v0.35.0
require (
dappco.re/go/core v0.5.0
dappco.re/go/core v0.8.0-alpha.1
dappco.re/go/core/log v0.1.0
forge.lthn.ai/core/go-inference v0.1.4
)
require (
forge.lthn.ai/core/go-log v0.0.4 // indirect
github.com/kr/text v0.2.0 // indirect
)
require github.com/kr/text v0.2.0 // indirect
require (
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect

4
go.sum
View file

@ -1,5 +1,5 @@
dappco.re/go/core v0.5.0 h1:P5DJoaCiK5Q+af5UiTdWqUIW4W4qYKzpgGK50thm21U=
dappco.re/go/core v0.5.0/go.mod h1:f2/tBZ3+3IqDrg2F5F598llv0nmb/4gJVCFzM5geE4A=
dappco.re/go/core v0.8.0-alpha.1 h1:gj7+Scv+L63Z7wMxbJYHhaRFkHJo2u4MMPuUSv/Dhtk=
dappco.re/go/core v0.8.0-alpha.1/go.mod h1:f2/tBZ3+3IqDrg2F5F598llv0nmb/4gJVCFzM5geE4A=
dappco.re/go/core/log v0.1.0 h1:pa71Vq2TD2aoEUQWFKwNcaJ3GBY8HbaNGqtE688Unyc=
dappco.re/go/core/log v0.1.0/go.mod h1:Nkqb8gsXhZAO8VLpx7B8i1iAmohhzqA20b9Zr8VUcJs=
forge.lthn.ai/core/go-inference v0.1.4 h1:fuAgWbqsEDajHniqAKyvHYbRcBrkGEiGSqR2pfTMRY0=

View file

@ -2,9 +2,10 @@ package i18n
import (
"maps"
"strings"
"text/template"
"unicode"
"dappco.re/go/core"
)
// GetGrammarData returns the grammar data for the specified language.
@ -55,7 +56,7 @@ func getVerbForm(lang, verb, form string) string {
if data == nil || data.Verbs == nil {
return ""
}
verb = strings.ToLower(verb)
verb = core.Lower(verb)
if forms, ok := data.Verbs[verb]; ok {
switch form {
case "past":
@ -72,7 +73,7 @@ func getWord(lang, word string) string {
if data == nil || data.Words == nil {
return ""
}
return data.Words[strings.ToLower(word)]
return data.Words[core.Lower(word)]
}
func getPunct(lang, rule, defaultVal string) string {
@ -98,7 +99,7 @@ func getNounForm(lang, noun, form string) string {
if data == nil || data.Nouns == nil {
return ""
}
noun = strings.ToLower(noun)
noun = core.Lower(noun)
if forms, ok := data.Nouns[noun]; ok {
switch form {
case "one":
@ -126,7 +127,7 @@ func currentLangForGrammar() string {
// PastTense("run") // "ran"
// PastTense("copy") // "copied"
func PastTense(verb string) string {
verb = strings.ToLower(strings.TrimSpace(verb))
verb = core.Lower(core.Trim(verb))
if verb == "" {
return ""
}
@ -140,16 +141,16 @@ func PastTense(verb string) string {
}
func applyRegularPastTense(verb string) string {
if strings.HasSuffix(verb, "ed") && len(verb) > 2 {
if core.HasSuffix(verb, "ed") && len(verb) > 2 {
thirdFromEnd := verb[len(verb)-3]
if !isVowel(rune(thirdFromEnd)) && thirdFromEnd != 'e' {
return verb
}
}
if strings.HasSuffix(verb, "e") {
if core.HasSuffix(verb, "e") {
return verb + "d"
}
if strings.HasSuffix(verb, "y") && len(verb) > 1 {
if core.HasSuffix(verb, "y") && len(verb) > 1 {
prev := rune(verb[len(verb)-2])
if !isVowel(prev) {
return verb[:len(verb)-1] + "ied"
@ -189,7 +190,7 @@ func shouldDoubleConsonant(verb string) bool {
// Gerund("run") // "running"
// Gerund("die") // "dying"
func Gerund(verb string) string {
verb = strings.ToLower(strings.TrimSpace(verb))
verb = core.Lower(core.Trim(verb))
if verb == "" {
return ""
}
@ -203,10 +204,10 @@ func Gerund(verb string) string {
}
func applyRegularGerund(verb string) string {
if strings.HasSuffix(verb, "ie") {
if core.HasSuffix(verb, "ie") {
return verb[:len(verb)-2] + "ying"
}
if strings.HasSuffix(verb, "e") && len(verb) > 1 {
if core.HasSuffix(verb, "e") && len(verb) > 1 {
secondLast := rune(verb[len(verb)-2])
if secondLast != 'e' && secondLast != 'y' && secondLast != 'o' {
return verb[:len(verb)-1] + "ing"
@ -232,20 +233,20 @@ func Pluralize(noun string, count int) string {
// PluralForm returns the plural form of a noun.
func PluralForm(noun string) string {
noun = strings.TrimSpace(noun)
noun = core.Trim(noun)
if noun == "" {
return ""
}
lower := strings.ToLower(noun)
lower := core.Lower(noun)
if form := getNounForm(currentLangForGrammar(), lower, "other"); form != "" {
if unicode.IsUpper(rune(noun[0])) && len(form) > 0 {
return strings.ToUpper(string(form[0])) + form[1:]
return core.Upper(string(form[0])) + form[1:]
}
return form
}
if plural, ok := irregularNouns[lower]; ok {
if unicode.IsUpper(rune(noun[0])) {
return strings.ToUpper(string(plural[0])) + plural[1:]
return core.Upper(string(plural[0])) + plural[1:]
}
return plural
}
@ -253,28 +254,28 @@ func PluralForm(noun string) string {
}
func applyRegularPlural(noun string) string {
lower := strings.ToLower(noun)
if strings.HasSuffix(lower, "s") ||
strings.HasSuffix(lower, "ss") ||
strings.HasSuffix(lower, "sh") ||
strings.HasSuffix(lower, "ch") ||
strings.HasSuffix(lower, "x") ||
strings.HasSuffix(lower, "z") {
lower := core.Lower(noun)
if core.HasSuffix(lower, "s") ||
core.HasSuffix(lower, "ss") ||
core.HasSuffix(lower, "sh") ||
core.HasSuffix(lower, "ch") ||
core.HasSuffix(lower, "x") ||
core.HasSuffix(lower, "z") {
return noun + "es"
}
if strings.HasSuffix(lower, "y") && len(noun) > 1 {
if core.HasSuffix(lower, "y") && len(noun) > 1 {
prev := rune(lower[len(lower)-2])
if !isVowel(prev) {
return noun[:len(noun)-1] + "ies"
}
}
if strings.HasSuffix(lower, "f") {
if core.HasSuffix(lower, "f") {
return noun[:len(noun)-1] + "ves"
}
if strings.HasSuffix(lower, "fe") {
if core.HasSuffix(lower, "fe") {
return noun[:len(noun)-2] + "ves"
}
if strings.HasSuffix(lower, "o") && len(noun) > 1 {
if core.HasSuffix(lower, "o") && len(noun) > 1 {
prev := rune(lower[len(lower)-2])
if !isVowel(prev) {
if lower == "hero" || lower == "potato" || lower == "tomato" || lower == "echo" || lower == "veto" {
@ -295,14 +296,14 @@ func Article(word string) string {
if word == "" {
return ""
}
lower := strings.ToLower(strings.TrimSpace(word))
lower := core.Lower(core.Trim(word))
for key := range consonantSounds {
if strings.HasPrefix(lower, key) {
if core.HasPrefix(lower, key) {
return "a"
}
}
for key := range vowelSounds {
if strings.HasPrefix(lower, key) {
if core.HasPrefix(lower, key) {
return "an"
}
}
@ -322,7 +323,7 @@ func isVowel(r rune) bool {
// Title capitalises the first letter of each word.
func Title(s string) string {
var b strings.Builder
b := core.NewBuilder()
b.Grow(len(s))
prev := ' '
for _, r := range s {
@ -345,8 +346,8 @@ func Quote(s string) string {
func TemplateFuncs() template.FuncMap {
return template.FuncMap{
"title": Title,
"lower": strings.ToLower,
"upper": strings.ToUpper,
"lower": core.Lower,
"upper": core.Upper,
"past": PastTense,
"gerund": Gerund,
"plural": Pluralize,

View file

@ -1,19 +1,18 @@
package i18n
import (
"fmt"
"strings"
"dappco.re/go/core"
)
// LabelHandler handles i18n.label.{word} -> "Status:" patterns.
type LabelHandler struct{}
func (h LabelHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.label.")
return core.HasPrefix(key, "i18n.label.")
}
func (h LabelHandler) Handle(key string, args []any, next func() string) string {
word := strings.TrimPrefix(key, "i18n.label.")
word := core.TrimPrefix(key, "i18n.label.")
return Label(word)
}
@ -21,11 +20,11 @@ func (h LabelHandler) Handle(key string, args []any, next func() string) string
type ProgressHandler struct{}
func (h ProgressHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.progress.")
return core.HasPrefix(key, "i18n.progress.")
}
func (h ProgressHandler) Handle(key string, args []any, next func() string) string {
verb := strings.TrimPrefix(key, "i18n.progress.")
verb := core.TrimPrefix(key, "i18n.progress.")
if len(args) > 0 {
if subj, ok := args[0].(string); ok {
return ProgressSubject(verb, subj)
@ -38,14 +37,14 @@ func (h ProgressHandler) Handle(key string, args []any, next func() string) stri
type CountHandler struct{}
func (h CountHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.count.")
return core.HasPrefix(key, "i18n.count.")
}
func (h CountHandler) Handle(key string, args []any, next func() string) string {
noun := strings.TrimPrefix(key, "i18n.count.")
noun := core.TrimPrefix(key, "i18n.count.")
if len(args) > 0 {
count := toInt(args[0])
return fmt.Sprintf("%d %s", count, Pluralize(noun, count))
return core.Sprintf("%d %s", count, Pluralize(noun, count))
}
return noun
}
@ -54,11 +53,11 @@ func (h CountHandler) Handle(key string, args []any, next func() string) string
type DoneHandler struct{}
func (h DoneHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.done.")
return core.HasPrefix(key, "i18n.done.")
}
func (h DoneHandler) Handle(key string, args []any, next func() string) string {
verb := strings.TrimPrefix(key, "i18n.done.")
verb := core.TrimPrefix(key, "i18n.done.")
if len(args) > 0 {
if subj, ok := args[0].(string); ok {
return ActionResult(verb, subj)
@ -71,11 +70,11 @@ func (h DoneHandler) Handle(key string, args []any, next func() string) string {
type FailHandler struct{}
func (h FailHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.fail.")
return core.HasPrefix(key, "i18n.fail.")
}
func (h FailHandler) Handle(key string, args []any, next func() string) string {
verb := strings.TrimPrefix(key, "i18n.fail.")
verb := core.TrimPrefix(key, "i18n.fail.")
if len(args) > 0 {
if subj, ok := args[0].(string); ok {
return ActionFailed(verb, subj)
@ -88,14 +87,14 @@ func (h FailHandler) Handle(key string, args []any, next func() string) string {
type NumericHandler struct{}
func (h NumericHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.numeric.")
return core.HasPrefix(key, "i18n.numeric.")
}
func (h NumericHandler) Handle(key string, args []any, next func() string) string {
if len(args) == 0 {
return next()
}
format := strings.TrimPrefix(key, "i18n.numeric.")
format := core.TrimPrefix(key, "i18n.numeric.")
switch format {
case "number", "int":
return FormatNumber(toInt64(args[0]))

View file

@ -2,9 +2,9 @@ package i18n
import (
"bytes"
"errors"
"strings"
"text/template"
"dappco.re/go/core"
)
// T translates a message using the default service.
@ -24,7 +24,7 @@ func Raw(messageID string, args ...any) string {
}
// ErrServiceNotInitialised is returned when the service is not initialised.
var ErrServiceNotInitialised = errors.New("i18n: service not initialised")
var ErrServiceNotInitialised = core.NewError("i18n: service not initialised")
// ErrServiceNotInitialized is deprecated: use ErrServiceNotInitialised.
var ErrServiceNotInitialized = ErrServiceNotInitialised
@ -109,7 +109,7 @@ func executeIntentTemplate(tmplStr string, data templateData) string {
}
func applyTemplate(text string, data any) string {
if !strings.Contains(text, "{{") {
if !core.Contains(text, "{{") {
return text
}
if cached, ok := templateCache.Load(text); ok {

View file

@ -2,10 +2,10 @@ package integration
import (
"context"
"fmt"
"sort"
"testing"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n"
"forge.lthn.ai/core/go-inference"
_ "forge.lthn.ai/core/go-mlx" // registers Metal backend
@ -549,7 +549,7 @@ func TestCalibrateDomains_1Bvs27B(t *testing.T) {
disagreements++
truth := ""
if r.TrueDomain != "" {
truth = fmt.Sprintf(" [truth=%s]", r.TrueDomain)
truth = core.Sprintf(" [truth=%s]", r.TrueDomain)
}
t.Logf(" DISAGREE: 1B=%s 27B=%s%s | %.60s", r.DomainA, r.DomainB, truth, r.Text)
if disagreements >= 50 {

View file

@ -3,12 +3,10 @@ package integration
import (
"bytes"
"context"
"encoding/json"
"fmt"
"strings"
"testing"
"time"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n"
"forge.lthn.ai/core/go-inference"
_ "forge.lthn.ai/core/go-mlx" // registers Metal backend
@ -24,9 +22,9 @@ func TestClassifyCorpus_Integration(t *testing.T) {
// Build 50 technical prompts for throughput measurement
var lines []string
for i := 0; i < 50; i++ {
lines = append(lines, fmt.Sprintf(`{"id":%d,"prompt":"Delete the configuration file and rebuild the project"}`, i))
lines = append(lines, core.Sprintf(`{"id":%d,"prompt":"Delete the configuration file and rebuild the project"}`, i))
}
input := strings.NewReader(strings.Join(lines, "\n") + "\n")
input := core.NewReader(core.Join("\n", lines...) + "\n")
var output bytes.Buffer
start := time.Now()
@ -58,10 +56,10 @@ func TestClassifyCorpus_Integration(t *testing.T) {
}
// Also inspect the output JSONL for misclassified entries
outLines := strings.Split(strings.TrimSpace(output.String()), "\n")
outLines := core.Split(core.Trim(output.String()), "\n")
for _, line := range outLines {
var record map[string]any
if err := json.Unmarshal([]byte(line), &record); err == nil {
if r := core.JSONUnmarshal([]byte(line), &record); r.OK {
if record["domain_1b"] != "technical" {
t.Logf(" misclassified: id=%v domain_1b=%v", record["id"], record["domain_1b"])
}

View file

@ -1,12 +1,12 @@
package i18n
import (
"encoding/json"
"io/fs"
"path"
"strings"
"sync"
"dappco.re/go/core"
log "dappco.re/go/core/log"
)
@ -29,8 +29,8 @@ func NewFSLoader(fsys fs.FS, dir string) *FSLoader {
func (l *FSLoader) Load(lang string) (map[string]Message, *GrammarData, error) {
variants := []string{
lang + ".json",
strings.ReplaceAll(lang, "-", "_") + ".json",
strings.ReplaceAll(lang, "_", "-") + ".json",
core.Replace(lang, "-", "_") + ".json",
core.Replace(lang, "_", "-") + ".json",
}
var data []byte
@ -47,8 +47,8 @@ func (l *FSLoader) Load(lang string) (map[string]Message, *GrammarData, error) {
}
var raw map[string]any
if err := json.Unmarshal(data, &raw); err != nil {
return nil, nil, log.E("FSLoader.Load", "invalid JSON in locale: "+lang, err)
if r := core.JSONUnmarshal(data, &raw); !r.OK {
return nil, nil, log.E("FSLoader.Load", "invalid JSON in locale: "+lang, r.Value.(error))
}
messages := make(map[string]Message)
@ -72,11 +72,11 @@ func (l *FSLoader) Languages() []string {
return
}
for _, entry := range entries {
if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") {
if entry.IsDir() || !core.HasSuffix(entry.Name(), ".json") {
continue
}
lang := strings.TrimSuffix(entry.Name(), ".json")
lang = strings.ReplaceAll(lang, "_", "-")
lang := core.TrimSuffix(entry.Name(), ".json")
lang = core.Replace(lang, "_", "-")
l.languages = append(l.languages, lang)
}
})
@ -106,9 +106,9 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
switch v := value.(type) {
case string:
if grammar != nil && strings.HasPrefix(fullKey, "gram.word.") {
wordKey := strings.TrimPrefix(fullKey, "gram.word.")
grammar.Words[strings.ToLower(wordKey)] = v
if grammar != nil && core.HasPrefix(fullKey, "gram.word.") {
wordKey := core.TrimPrefix(fullKey, "gram.word.")
grammar.Words[core.Lower(wordKey)] = v
continue
}
out[fullKey] = Message{Text: v}
@ -127,12 +127,12 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if gerund, ok := v["gerund"].(string); ok {
forms.Gerund = gerund
}
grammar.Verbs[strings.ToLower(verbName)] = forms
grammar.Verbs[core.Lower(verbName)] = forms
continue
}
// Noun form object (under gram.noun.* or has gender field)
if grammar != nil && (strings.HasPrefix(fullKey, "gram.noun.") || isNounFormObject(v)) {
if grammar != nil && (core.HasPrefix(fullKey, "gram.noun.") || isNounFormObject(v)) {
nounName := key
if after, ok := strings.CutPrefix(fullKey, "gram.noun."); ok {
nounName = after
@ -150,7 +150,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if gender, ok := v["gender"].(string); ok {
forms.Gender = gender
}
grammar.Nouns[strings.ToLower(nounName)] = forms
grammar.Nouns[core.Lower(nounName)] = forms
continue
}
}
@ -161,7 +161,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if arr, ok := nd.([]any); ok {
for _, item := range arr {
if s, ok := item.(string); ok {
grammar.Signals.NounDeterminers = append(grammar.Signals.NounDeterminers, strings.ToLower(s))
grammar.Signals.NounDeterminers = append(grammar.Signals.NounDeterminers, core.Lower(s))
}
}
}
@ -170,7 +170,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if arr, ok := va.([]any); ok {
for _, item := range arr {
if s, ok := item.(string); ok {
grammar.Signals.VerbAuxiliaries = append(grammar.Signals.VerbAuxiliaries, strings.ToLower(s))
grammar.Signals.VerbAuxiliaries = append(grammar.Signals.VerbAuxiliaries, core.Lower(s))
}
}
}
@ -179,7 +179,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if arr, ok := vi.([]any); ok {
for _, item := range arr {
if s, ok := item.(string); ok {
grammar.Signals.VerbInfinitive = append(grammar.Signals.VerbInfinitive, strings.ToLower(s))
grammar.Signals.VerbInfinitive = append(grammar.Signals.VerbInfinitive, core.Lower(s))
}
}
}

View file

@ -2,8 +2,8 @@ package i18n
import (
"os"
"strings"
"dappco.re/go/core"
"golang.org/x/text/language"
)
@ -95,8 +95,8 @@ func detectLanguage(supported []language.Tag) string {
if langEnv == "" {
return ""
}
baseLang := strings.Split(langEnv, ".")[0]
baseLang = strings.ReplaceAll(baseLang, "_", "-")
baseLang := core.Split(langEnv, ".")[0]
baseLang = core.Replace(baseLang, "_", "-")
parsedLang, err := language.Parse(baseLang)
if err != nil {
return ""

View file

@ -1,15 +1,15 @@
package i18n
import (
"fmt"
"math"
"strconv"
"strings"
"dappco.re/go/core"
)
func getNumberFormat() NumberFormat {
lang := currentLangForGrammar()
if idx := strings.IndexAny(lang, "-_"); idx > 0 {
if idx := indexAny(lang, "-_"); idx > 0 {
lang = lang[:idx]
}
if fmt, ok := numberFormats[lang]; ok {
@ -42,8 +42,8 @@ func FormatDecimalN(f float64, decimals int) string {
if fracInt == 0 {
return intStr
}
fracStr := fmt.Sprintf("%0*d", decimals, fracInt)
fracStr = strings.TrimRight(fracStr, "0")
fracStr := core.Sprintf("%0*d", decimals, fracInt)
fracStr = trimRight(fracStr, "0")
return intStr + nf.DecimalSep + fracStr
}
@ -57,7 +57,7 @@ func FormatPercent(f float64) string {
} else {
numStr = FormatDecimalN(pct, 1)
}
return fmt.Sprintf(nf.PercentFmt, numStr)
return core.Sprintf(nf.PercentFmt, numStr)
}
// FormatBytes formats bytes as human-readable size.
@ -85,31 +85,31 @@ func FormatBytes(bytes int64) string {
value = float64(bytes) / KB
unit = "KB"
default:
return fmt.Sprintf("%d B", bytes)
return core.Sprintf("%d B", bytes)
}
intPart := int64(value)
fracPart := value - float64(intPart)
if fracPart < 0.05 {
return fmt.Sprintf("%d %s", intPart, unit)
return core.Sprintf("%d %s", intPart, unit)
}
fracDigit := int(math.Round(fracPart * 10))
if fracDigit == 10 {
return fmt.Sprintf("%d %s", intPart+1, unit)
return core.Sprintf("%d %s", intPart+1, unit)
}
return fmt.Sprintf("%d%s%d %s", intPart, nf.DecimalSep, fracDigit, unit)
return core.Sprintf("%d%s%d %s", intPart, nf.DecimalSep, fracDigit, unit)
}
// FormatOrdinal formats a number as an ordinal.
func FormatOrdinal(n int) string {
lang := currentLangForGrammar()
if idx := strings.IndexAny(lang, "-_"); idx > 0 {
if idx := indexAny(lang, "-_"); idx > 0 {
lang = lang[:idx]
}
switch lang {
case "en":
return formatEnglishOrdinal(n)
default:
return fmt.Sprintf("%d.", n)
return core.Sprintf("%d.", n)
}
}
@ -119,17 +119,17 @@ func formatEnglishOrdinal(n int) string {
abs = -abs
}
if abs%100 >= 11 && abs%100 <= 13 {
return fmt.Sprintf("%dth", n)
return core.Sprintf("%dth", n)
}
switch abs % 10 {
case 1:
return fmt.Sprintf("%dst", n)
return core.Sprintf("%dst", n)
case 2:
return fmt.Sprintf("%dnd", n)
return core.Sprintf("%dnd", n)
case 3:
return fmt.Sprintf("%drd", n)
return core.Sprintf("%drd", n)
default:
return fmt.Sprintf("%dth", n)
return core.Sprintf("%dth", n)
}
}
@ -148,7 +148,7 @@ func formatIntWithSep(n int64, sep string) string {
}
return str
}
var result strings.Builder
result := core.NewBuilder()
for i, c := range str {
if i > 0 && (len(str)-i)%3 == 0 {
result.WriteString(sep)
@ -160,3 +160,34 @@ func formatIntWithSep(n int64, sep string) string {
}
return result.String()
}
// indexAny returns the byte index of the first rune in s that also occurs
// in chars, or -1 when no rune of s appears in chars. It is a local,
// allocation-free stand-in for strings.IndexAny.
func indexAny(s, chars string) int {
	for idx, r := range s {
		match := false
		for _, candidate := range chars {
			if candidate == r {
				match = true
				break
			}
		}
		if match {
			return idx
		}
	}
	return -1
}
// trimRight returns s with all trailing runes that appear in cutset
// removed, mirroring strings.TrimRight. It is a local stand-in used to
// avoid importing the strings package.
//
// The previous implementation compared only the final BYTE of s
// (rune(s[len(s)-1])) against the runes of cutset, so any multibyte rune
// in cutset could never match. This version compares the UTF-8 encoding
// of each cutset rune against the tail of s; for valid UTF-8 a suffix
// match on a full rune encoding is exactly a trailing-rune match, and the
// behaviour for pure-ASCII cutsets (the only callers here use "0") is
// unchanged.
func trimRight(s, cutset string) string {
	for len(s) > 0 {
		trimmed := false
		for _, c := range cutset {
			// Encoded form of the cutset rune; suffix-compare in bytes.
			enc := string(c)
			if len(s) >= len(enc) && s[len(s)-len(enc):] == enc {
				s = s[:len(s)-len(enc)]
				trimmed = true
				break
			}
		}
		if !trimmed {
			break
		}
	}
	return s
}

View file

@ -1,10 +1,10 @@
package reversal
import (
"fmt"
"sort"
"strings"
"testing"
"dappco.re/go/core"
)
// Domain categories for classification ground truth.
@ -418,17 +418,17 @@ func TestClassification_LeaveOneOut(t *testing.T) {
// Print confusion matrix
t.Log("\nConfusion matrix (rows=actual, cols=predicted):")
var header strings.Builder
header.WriteString(fmt.Sprintf(" %-10s", ""))
header := core.NewBuilder()
header.WriteString(core.Sprintf(" %-10s", ""))
for _, d := range domains {
header.WriteString(fmt.Sprintf(" %10s", d[:4]))
header.WriteString(core.Sprintf(" %10s", d[:4]))
}
t.Log(header.String())
for _, actual := range domains {
var row strings.Builder
row.WriteString(fmt.Sprintf(" %-10s", actual[:4]))
row := core.NewBuilder()
row.WriteString(core.Sprintf(" %-10s", actual[:4]))
for _, predicted := range domains {
row.WriteString(fmt.Sprintf(" %10d", confusion[actual][predicted]))
row.WriteString(core.Sprintf(" %10d", confusion[actual][predicted]))
}
t.Log(row.String())
}
@ -506,14 +506,14 @@ func TestClassification_TenseProfile(t *testing.T) {
}
}
var parts strings.Builder
parts.WriteString(fmt.Sprintf("%-10s verbs=%d", d, totalVerbs))
parts := core.NewBuilder()
parts.WriteString(core.Sprintf("%-10s verbs=%d", d, totalVerbs))
for _, tense := range tenses {
pct := 0.0
if totalVerbs > 0 {
pct = float64(tenseCounts[tense]) / float64(totalVerbs) * 100
}
parts.WriteString(fmt.Sprintf(" %s=%.0f%%", tense, pct))
parts.WriteString(core.Sprintf(" %s=%.0f%%", tense, pct))
}
t.Log(parts.String())
}
@ -550,12 +550,12 @@ func TestClassification_TopVerbs(t *testing.T) {
sort.Slice(sorted, func(i, j int) bool { return sorted[i].count > sorted[j].count })
top := min(len(sorted), 8)
var verbs strings.Builder
verbs := core.NewBuilder()
for i := 0; i < top; i++ {
if i > 0 {
verbs.WriteString(", ")
}
verbs.WriteString(fmt.Sprintf("%s(%d)", sorted[i].verb, sorted[i].count))
verbs.WriteString(core.Sprintf("%s(%d)", sorted[i].verb, sorted[i].count))
}
t.Logf("%-10s unique=%d top: %s", d, len(verbCounts), verbs.String())
}

View file

@ -1,9 +1,9 @@
package reversal
import (
"strings"
"unicode"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n"
)
@ -26,7 +26,7 @@ func NewMultiplierForLang(lang string) *Multiplier {
// Expand produces: original + tense flips (past, gerund) + number flips (plural toggle) + combinations.
// All output is deterministic and grammatically correct.
func (m *Multiplier) Expand(text string) []string {
text = strings.TrimSpace(text)
text = core.Trim(text)
if text == "" {
return nil
}
@ -140,7 +140,7 @@ func (m *Multiplier) applyVerbTransform(tokens []Token, vi int, targetTense stri
result[vi] = Token{
Raw: newForm,
Lower: strings.ToLower(newForm),
Lower: core.Lower(newForm),
Type: TokenVerb,
Confidence: 1.0,
VerbInfo: VerbMatch{
@ -191,7 +191,7 @@ func (m *Multiplier) applyNounTransformOnTokens(tokens []Token, ni int) []Token
result[ni] = Token{
Raw: newForm,
Lower: strings.ToLower(newForm),
Lower: core.Lower(newForm),
Type: TokenNoun,
Confidence: 1.0,
NounInfo: NounMatch{
@ -206,7 +206,7 @@ func (m *Multiplier) applyNounTransformOnTokens(tokens []Token, ni int) []Token
// reconstruct joins tokens back into a string, preserving spacing.
func reconstruct(tokens []Token) string {
var b strings.Builder
b := core.NewBuilder()
for i, tok := range tokens {
if i > 0 {
// Punctuation tokens that were split from the previous word
@ -235,7 +235,7 @@ func preserveCase(original, replacement string) string {
// If the original is all uppercase (like "DELETE"), make replacement all uppercase.
if isAllUpper(original) && len(original) > 1 {
return strings.ToUpper(replacement)
return core.Upper(replacement)
}
// If the first character of the original is uppercase, capitalise the replacement.

View file

@ -18,6 +18,7 @@ package reversal
import (
"strings"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n"
)
@ -209,7 +210,7 @@ func (t *Tokeniser) buildNounIndex() {
// Tier 3: Try reverse morphology rules and round-trip verify via
// the forward function PluralForm().
func (t *Tokeniser) MatchNoun(word string) (NounMatch, bool) {
word = strings.ToLower(strings.TrimSpace(word))
word = core.Lower(core.Trim(word))
if word == "" {
return NounMatch{}, false
}
@ -250,27 +251,27 @@ func (t *Tokeniser) reverseRegularPlural(word string) []string {
var candidates []string
// Rule: consonant + "ies" → consonant + "y" (e.g., "entries" → "entry")
if strings.HasSuffix(word, "ies") && len(word) > 3 {
if core.HasSuffix(word, "ies") && len(word) > 3 {
base := word[:len(word)-3] + "y"
candidates = append(candidates, base)
}
// Rule: "ves" → "f" or "fe" (e.g., "wolves" → "wolf", "knives" → "knife")
if strings.HasSuffix(word, "ves") && len(word) > 3 {
if core.HasSuffix(word, "ves") && len(word) > 3 {
candidates = append(candidates, word[:len(word)-3]+"f")
candidates = append(candidates, word[:len(word)-3]+"fe")
}
// Rule: sibilant + "es" (e.g., "processes" → "process", "branches" → "branch")
if strings.HasSuffix(word, "ses") || strings.HasSuffix(word, "xes") ||
strings.HasSuffix(word, "zes") || strings.HasSuffix(word, "ches") ||
strings.HasSuffix(word, "shes") {
if core.HasSuffix(word, "ses") || core.HasSuffix(word, "xes") ||
core.HasSuffix(word, "zes") || core.HasSuffix(word, "ches") ||
core.HasSuffix(word, "shes") {
base := word[:len(word)-2] // strip "es"
candidates = append(candidates, base)
}
// Rule: drop "s" (e.g., "servers" → "server")
if strings.HasSuffix(word, "s") && len(word) > 1 {
if core.HasSuffix(word, "s") && len(word) > 1 {
base := word[:len(word)-1]
candidates = append(candidates, base)
}
@ -285,7 +286,7 @@ func (t *Tokeniser) reverseRegularPlural(word string) []string {
// Tier 3: Try reverse morphology rules and round-trip verify via
// the forward functions PastTense() and Gerund().
func (t *Tokeniser) MatchVerb(word string) (VerbMatch, bool) {
word = strings.ToLower(strings.TrimSpace(word))
word = core.Lower(core.Trim(word))
if word == "" {
return VerbMatch{}, false
}
@ -358,7 +359,7 @@ func (t *Tokeniser) bestRoundTrip(target string, candidates []string, forward fu
// Priority 3: prefer candidate not ending in "e" (avoids phantom verbs
// with CCe endings like "walke", "processe")
for _, m := range matches {
if !strings.HasSuffix(m, "e") {
if !core.HasSuffix(m, "e") {
return m
}
}
@ -402,12 +403,12 @@ func isVowelByte(b byte) bool {
func (t *Tokeniser) reverseRegularPast(word string) []string {
var candidates []string
if !strings.HasSuffix(word, "ed") {
if !core.HasSuffix(word, "ed") {
return candidates
}
// Rule: consonant + "ied" → consonant + "y" (e.g., "copied" → "copy")
if strings.HasSuffix(word, "ied") && len(word) > 3 {
if core.HasSuffix(word, "ied") && len(word) > 3 {
base := word[:len(word)-3] + "y"
candidates = append(candidates, base)
}
@ -448,14 +449,14 @@ func (t *Tokeniser) reverseRegularPast(word string) []string {
func (t *Tokeniser) reverseRegularGerund(word string) []string {
var candidates []string
if !strings.HasSuffix(word, "ing") || len(word) < 4 {
if !core.HasSuffix(word, "ing") || len(word) < 4 {
return candidates
}
stem := word[:len(word)-3] // strip "ing"
// Rule: "ying" → "ie" (e.g., "dying" → "die")
if strings.HasSuffix(word, "ying") && len(word) > 4 {
if core.HasSuffix(word, "ying") && len(word) > 4 {
base := word[:len(word)-4] + "ie"
candidates = append(candidates, base)
}
@ -488,15 +489,15 @@ func (t *Tokeniser) buildWordIndex() {
}
for key, display := range data.Words {
// Map the key itself (already lowercase)
t.words[strings.ToLower(key)] = key
t.words[core.Lower(key)] = key
// Map the display form (e.g., "URL" → "url", "SSH" → "ssh")
t.words[strings.ToLower(display)] = key
t.words[core.Lower(display)] = key
}
}
// IsDualClass returns true if the word exists in both verb and noun tables.
func (t *Tokeniser) IsDualClass(word string) bool {
return t.dualClass[strings.ToLower(word)]
return t.dualClass[core.Lower(word)]
}
func (t *Tokeniser) buildDualClassIndex() {
@ -519,7 +520,7 @@ func (t *Tokeniser) buildSignalIndex() {
// falls back per-field rather than silently disabling signals.
if data != nil && len(data.Signals.NounDeterminers) > 0 {
for _, w := range data.Signals.NounDeterminers {
t.nounDet[strings.ToLower(w)] = true
t.nounDet[core.Lower(w)] = true
}
} else {
for _, w := range []string{
@ -534,7 +535,7 @@ func (t *Tokeniser) buildSignalIndex() {
if data != nil && len(data.Signals.VerbAuxiliaries) > 0 {
for _, w := range data.Signals.VerbAuxiliaries {
t.verbAux[strings.ToLower(w)] = true
t.verbAux[core.Lower(w)] = true
}
} else {
for _, w := range []string{
@ -548,7 +549,7 @@ func (t *Tokeniser) buildSignalIndex() {
if data != nil && len(data.Signals.VerbInfinitive) > 0 {
for _, w := range data.Signals.VerbInfinitive {
t.verbInf[strings.ToLower(w)] = true
t.verbInf[core.Lower(w)] = true
}
} else {
t.verbInf["to"] = true
@ -570,7 +571,7 @@ func defaultWeights() map[string]float64 {
// MatchWord performs a case-insensitive lookup in the words map.
// Returns the category key and true if found, or ("", false) otherwise.
func (t *Tokeniser) MatchWord(word string) (string, bool) {
cat, ok := t.words[strings.ToLower(word)]
cat, ok := t.words[core.Lower(word)]
return cat, ok
}
@ -583,13 +584,13 @@ func (t *Tokeniser) MatchArticle(word string) (string, bool) {
return "", false
}
lower := strings.ToLower(word)
lower := core.Lower(word)
if lower == strings.ToLower(data.Articles.IndefiniteDefault) ||
lower == strings.ToLower(data.Articles.IndefiniteVowel) {
if lower == core.Lower(data.Articles.IndefiniteDefault) ||
lower == core.Lower(data.Articles.IndefiniteVowel) {
return "indefinite", true
}
if lower == strings.ToLower(data.Articles.Definite) {
if lower == core.Lower(data.Articles.Definite) {
return "definite", true
}
@ -613,7 +614,7 @@ var clauseBoundaries = map[string]bool{
// Pass 1 classifies unambiguous tokens and marks dual-class base forms.
// Pass 2 resolves ambiguous tokens using weighted disambiguation signals.
func (t *Tokeniser) Tokenise(text string) []Token {
text = strings.TrimSpace(text)
text = core.Trim(text)
if text == "" {
return nil
}
@ -628,7 +629,7 @@ func (t *Tokeniser) Tokenise(text string) []Token {
// Classify the word portion (if any).
if word != "" {
tok := Token{Raw: raw, Lower: strings.ToLower(word)}
tok := Token{Raw: raw, Lower: core.Lower(word)}
if artType, ok := t.MatchArticle(word); ok {
// Articles are unambiguous.
@ -943,7 +944,7 @@ func splitTrailingPunct(s string) (string, string) {
}
// Check for "..." suffix first (3-char pattern).
if strings.HasSuffix(s, "...") {
if core.HasSuffix(s, "...") {
return s[:len(s)-3], "..."
}
// Check single-char trailing punctuation.

View file

@ -2,16 +2,14 @@ package i18n
import (
"embed"
"encoding/json"
"fmt"
"io/fs"
"maps"
"path"
"slices"
"strings"
"sync"
"sync/atomic"
"dappco.re/go/core"
log "dappco.re/go/core/log"
"golang.org/x/text/language"
)
@ -185,8 +183,8 @@ func AddLoader(loader Loader) {
func (s *Service) loadJSON(lang string, data []byte) error {
var raw map[string]any
if err := json.Unmarshal(data, &raw); err != nil {
return err
if r := core.JSONUnmarshal(data, &raw); !r.OK {
return r.Value.(error)
}
messages := make(map[string]Message)
grammarData := &GrammarData{
@ -324,8 +322,8 @@ func (s *Service) resolveWithFallback(messageID string, data any) string {
if text := s.tryResolve(s.fallbackLang, messageID, data); text != "" {
return text
}
if strings.Contains(messageID, ".") {
parts := strings.Split(messageID, ".")
if core.Contains(messageID, ".") {
parts := core.Split(messageID, ".")
verb := parts[len(parts)-1]
commonKey := "common.action." + verb
if text := s.tryResolve(s.currentLang, commonKey, data); text != "" {
@ -394,7 +392,7 @@ func (s *Service) getEffectiveFormality(data any) Formality {
return f
}
case string:
switch strings.ToLower(f) {
switch core.Lower(f) {
case "formal":
return FormalityFormal
case "informal":
@ -408,7 +406,7 @@ func (s *Service) getEffectiveFormality(data any) Formality {
func (s *Service) handleMissingKey(key string, args []any) string {
switch s.mode {
case ModeStrict:
panic(fmt.Sprintf("i18n: missing translation key %q", key))
panic(core.Sprintf("i18n: missing translation key %q", key))
case ModeCollect:
var argsMap map[string]any
if len(args) > 0 {
@ -506,7 +504,7 @@ func (s *Service) LoadFS(fsys fs.FS, dir string) error {
return log.E("Service.LoadFS", "read locales directory", err)
}
for _, entry := range entries {
if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") {
if entry.IsDir() || !core.HasSuffix(entry.Name(), ".json") {
continue
}
filePath := path.Join(dir, entry.Name())
@ -514,8 +512,8 @@ func (s *Service) LoadFS(fsys fs.FS, dir string) error {
if err != nil {
return log.E("Service.LoadFS", "read locale: "+entry.Name(), err)
}
lang := strings.TrimSuffix(entry.Name(), ".json")
lang = strings.ReplaceAll(lang, "_", "-")
lang := core.TrimSuffix(entry.Name(), ".json")
lang = core.Replace(lang, "_", "-")
if err := s.loadJSON(lang, data); err != nil {
return log.E("Service.LoadFS", "parse locale: "+entry.Name(), err)
}

View file

@ -1,8 +1,9 @@
package i18n
import (
"fmt"
"time"
"dappco.re/go/core"
)
// TimeAgo returns a localised relative time string.
@ -28,12 +29,12 @@ func TimeAgo(t time.Time) string {
func FormatAgo(count int, unit string) string {
svc := Default()
if svc == nil {
return fmt.Sprintf("%d %ss ago", count, unit)
return core.Sprintf("%d %ss ago", count, unit)
}
key := "time.ago." + unit
result := svc.T(key, map[string]any{"Count": count})
if result == key {
return fmt.Sprintf("%d %s ago", count, Pluralize(unit, count))
return core.Sprintf("%d %s ago", count, Pluralize(unit, count))
}
return result
}

View file

@ -4,9 +4,8 @@ package i18n
import (
"context"
"fmt"
"strings"
"dappco.re/go/core"
log "dappco.re/go/core/log"
"forge.lthn.ai/core/go-inference"
)
@ -45,7 +44,7 @@ type IrregularResult struct {
// articlePrompt builds a fill-in-the-blank prompt for article prediction.
func articlePrompt(noun string) string {
return fmt.Sprintf(
return core.Sprintf(
"Complete with the correct article (a/an/the): ___ %s. Answer with just the article:",
noun,
)
@ -53,7 +52,7 @@ func articlePrompt(noun string) string {
// irregularPrompt builds a fill-in-the-blank prompt for irregular verb prediction.
func irregularPrompt(verb, tense string) string {
return fmt.Sprintf(
return core.Sprintf(
"What is the %s form of the verb '%s'? Answer with just the word:",
tense, verb,
)
@ -61,14 +60,14 @@ func irregularPrompt(verb, tense string) string {
// collectGenerated runs a single-token generation and returns the trimmed, lowercased output.
func collectGenerated(ctx context.Context, m inference.TextModel, prompt string) (string, error) {
var sb strings.Builder
sb := core.NewBuilder()
for tok := range m.Generate(ctx, prompt, inference.WithMaxTokens(1), inference.WithTemperature(0.05)) {
sb.WriteString(tok.Text)
}
if err := m.Err(); err != nil {
return "", err
}
return strings.TrimSpace(strings.ToLower(sb.String())), nil
return core.Trim(core.Lower(sb.String())), nil
}
// ValidateArticle checks whether a given article usage is grammatically correct
@ -80,7 +79,7 @@ func ValidateArticle(ctx context.Context, m inference.TextModel, noun string, ar
if err != nil {
return ArticleResult{}, log.E("ValidateArticle", "validate: "+noun, err)
}
given := strings.TrimSpace(strings.ToLower(article))
given := core.Trim(core.Lower(article))
return ArticleResult{
Noun: noun,
Given: given,
@ -99,7 +98,7 @@ func ValidateIrregular(ctx context.Context, m inference.TextModel, verb string,
if err != nil {
return IrregularResult{}, log.E("ValidateIrregular", "validate: "+verb+" ("+tense+")", err)
}
given := strings.TrimSpace(strings.ToLower(form))
given := core.Trim(core.Lower(form))
return IrregularResult{
Verb: verb,
Tense: tense,

View file

@ -4,10 +4,10 @@ package i18n
import (
"context"
"fmt"
"iter"
"testing"
"dappco.re/go/core"
"forge.lthn.ai/core/go-inference"
)
@ -73,7 +73,7 @@ func newMockIrregularModel(forms map[string]string) *mockGenerateModel {
// containsVerb checks if the prompt contains the verb in the expected format.
func containsVerb(prompt, verb string) bool {
return len(prompt) > 0 && len(verb) > 0 &&
contains(prompt, fmt.Sprintf("'%s'", verb))
contains(prompt, core.Sprintf("'%s'", verb))
}
// contains is a simple substring check (avoids importing strings in test).