feat: upgrade to core v0.8.0-alpha.1, replace banned stdlib imports
All checks were successful
Security Scan / security (push) Successful in 8s
Test / test (push) Successful in 49s

Replace fmt, errors, strings, encoding/json with Core primitives
across 20 files. Keep strings.Fields/CutPrefix. No translation
files modified.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Claude 2026-03-26 14:11:15 +00:00 committed by Virgil
parent 7e602ed48a
commit c4ef1e6205
22 changed files with 225 additions and 205 deletions

View file

@ -2,9 +2,9 @@ package i18n
import ( import (
"context" "context"
"fmt"
"time" "time"
"dappco.re/go/core"
log "dappco.re/go/core/log" log "dappco.re/go/core/log"
"forge.lthn.ai/core/go-inference" "forge.lthn.ai/core/go-inference"
) )
@ -66,7 +66,7 @@ func CalibrateDomains(ctx context.Context, modelA, modelB inference.TextModel,
// Build classification prompts from sample texts. // Build classification prompts from sample texts.
prompts := make([]string, len(samples)) prompts := make([]string, len(samples))
for i, s := range samples { for i, s := range samples {
prompts[i] = fmt.Sprintf(cfg.promptTemplate, s.Text) prompts[i] = core.Sprintf(cfg.promptTemplate, s.Text)
} }
// Classify with model A. // Classify with model A.
@ -93,7 +93,7 @@ func CalibrateDomains(ctx context.Context, modelA, modelB inference.TextModel,
if agree { if agree {
stats.Agreed++ stats.Agreed++
} else { } else {
key := fmt.Sprintf("%s->%s", a, b) key := core.Sprintf("%s->%s", a, b)
stats.ConfusionPairs[key]++ stats.ConfusionPairs[key]++
} }
stats.ByDomainA[a]++ stats.ByDomainA[a]++
@ -140,7 +140,7 @@ func classifyAll(ctx context.Context, model inference.TextModel, prompts []strin
results, err := model.Classify(ctx, batch, inference.WithMaxTokens(1)) results, err := model.Classify(ctx, batch, inference.WithMaxTokens(1))
if err != nil { if err != nil {
return nil, 0, log.E("classifyAll", fmt.Sprintf("classify batch [%d:%d]", i, end), err) return nil, 0, log.E("classifyAll", core.Sprintf("classify batch [%d:%d]", i, end), err)
} }
for j, r := range results { for j, r := range results {

View file

@ -3,12 +3,10 @@ package i18n
import ( import (
"bufio" "bufio"
"context" "context"
"encoding/json"
"fmt"
"io" "io"
"strings"
"time" "time"
"dappco.re/go/core"
log "dappco.re/go/core/log" log "dappco.re/go/core/log"
"forge.lthn.ai/core/go-inference" "forge.lthn.ai/core/go-inference"
) )
@ -63,7 +61,7 @@ func mapTokenToDomain(token string) string {
if len(token) == 0 { if len(token) == 0 {
return "unknown" return "unknown"
} }
lower := strings.ToLower(token) lower := core.Lower(token)
switch { switch {
case lower == "technical" || lower == "tech": case lower == "technical" || lower == "tech":
return "technical" return "technical"
@ -107,7 +105,7 @@ func ClassifyCorpus(ctx context.Context, model inference.TextModel,
} }
prompts := make([]string, len(batch)) prompts := make([]string, len(batch))
for i, p := range batch { for i, p := range batch {
prompts[i] = fmt.Sprintf(cfg.promptTemplate, p.prompt) prompts[i] = core.Sprintf(cfg.promptTemplate, p.prompt)
} }
results, err := model.Classify(ctx, prompts, inference.WithMaxTokens(1)) results, err := model.Classify(ctx, prompts, inference.WithMaxTokens(1))
if err != nil { if err != nil {
@ -119,13 +117,12 @@ func ClassifyCorpus(ctx context.Context, model inference.TextModel,
stats.ByDomain[domain]++ stats.ByDomain[domain]++
stats.Total++ stats.Total++
line, err := json.Marshal(batch[i].record) mr := core.JSONMarshal(batch[i].record)
if err != nil { if !mr.OK {
return log.E("ClassifyCorpus", "marshal output", err) return log.E("ClassifyCorpus", "marshal output", mr.Value.(error))
}
if _, err := fmt.Fprintf(output, "%s\n", line); err != nil {
return log.E("ClassifyCorpus", "write output", err)
} }
line := mr.Value.([]byte)
core.Print(output, "%s", line)
} }
batch = batch[:0] batch = batch[:0]
return nil return nil
@ -133,7 +130,7 @@ func ClassifyCorpus(ctx context.Context, model inference.TextModel,
for scanner.Scan() { for scanner.Scan() {
var record map[string]any var record map[string]any
if err := json.Unmarshal(scanner.Bytes(), &record); err != nil { if r := core.JSONUnmarshal(scanner.Bytes(), &record); !r.OK {
stats.Skipped++ stats.Skipped++
continue continue
} }

View file

@ -3,11 +3,10 @@ package i18n
import ( import (
"bytes" "bytes"
"context" "context"
"encoding/json"
"iter" "iter"
"strings"
"testing" "testing"
"dappco.re/go/core"
"forge.lthn.ai/core/go-inference" "forge.lthn.ai/core/go-inference"
) )
@ -85,7 +84,7 @@ func TestClassifyCorpus_Basic(t *testing.T) {
}, },
} }
input := strings.NewReader( input := core.NewReader(
`{"seed_id":"1","domain":"general","prompt":"Delete the file"}` + "\n" + `{"seed_id":"1","domain":"general","prompt":"Delete the file"}` + "\n" +
`{"seed_id":"2","domain":"science","prompt":"Explain gravity"}` + "\n", `{"seed_id":"2","domain":"science","prompt":"Explain gravity"}` + "\n",
) )
@ -102,15 +101,15 @@ func TestClassifyCorpus_Basic(t *testing.T) {
t.Errorf("Skipped = %d, want 0", stats.Skipped) t.Errorf("Skipped = %d, want 0", stats.Skipped)
} }
lines := strings.Split(strings.TrimSpace(output.String()), "\n") lines := core.Split(core.Trim(output.String()), "\n")
if len(lines) != 2 { if len(lines) != 2 {
t.Fatalf("output lines = %d, want 2", len(lines)) t.Fatalf("output lines = %d, want 2", len(lines))
} }
for i, line := range lines { for i, line := range lines {
var record map[string]any var record map[string]any
if err := json.Unmarshal([]byte(line), &record); err != nil { if r := core.JSONUnmarshal([]byte(line), &record); !r.OK {
t.Fatalf("line %d: unmarshal: %v", i, err) t.Fatalf("line %d: unmarshal: %v", i, r.Value)
} }
if record["domain_1b"] != "technical" { if record["domain_1b"] != "technical" {
t.Errorf("line %d: domain_1b = %v, want %q", i, record["domain_1b"], "technical") t.Errorf("line %d: domain_1b = %v, want %q", i, record["domain_1b"], "technical")
@ -133,7 +132,7 @@ func TestClassifyCorpus_SkipsMalformed(t *testing.T) {
}, },
} }
input := strings.NewReader( input := core.NewReader(
"not valid json\n" + "not valid json\n" +
`{"seed_id":"1","domain":"general","prompt":"Hello world"}` + "\n" + `{"seed_id":"1","domain":"general","prompt":"Hello world"}` + "\n" +
`{"seed_id":"2","domain":"general"}` + "\n", `{"seed_id":"2","domain":"general"}` + "\n",
@ -157,7 +156,7 @@ func TestClassifyCorpus_DomainMapping(t *testing.T) {
classifyFunc: func(_ context.Context, prompts []string, _ ...inference.GenerateOption) ([]inference.ClassifyResult, error) { classifyFunc: func(_ context.Context, prompts []string, _ ...inference.GenerateOption) ([]inference.ClassifyResult, error) {
results := make([]inference.ClassifyResult, len(prompts)) results := make([]inference.ClassifyResult, len(prompts))
for i, p := range prompts { for i, p := range prompts {
if strings.Contains(p, "Delete") { if core.Contains(p, "Delete") {
results[i] = inference.ClassifyResult{Token: inference.Token{Text: "technical"}} results[i] = inference.ClassifyResult{Token: inference.Token{Text: "technical"}}
} else { } else {
results[i] = inference.ClassifyResult{Token: inference.Token{Text: "ethical"}} results[i] = inference.ClassifyResult{Token: inference.Token{Text: "ethical"}}
@ -167,7 +166,7 @@ func TestClassifyCorpus_DomainMapping(t *testing.T) {
}, },
} }
input := strings.NewReader( input := core.NewReader(
`{"prompt":"Delete the file now"}` + "\n" + `{"prompt":"Delete the file now"}` + "\n" +
`{"prompt":"Is it right to lie?"}` + "\n", `{"prompt":"Is it right to lie?"}` + "\n",
) )

View file

@ -1,9 +1,9 @@
package i18n package i18n
import ( import (
"fmt"
"testing" "testing"
"dappco.re/go/core"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -123,7 +123,7 @@ func TestSubject_String_Good(t *testing.T) {
func TestSubject_String_Good_Stringer(t *testing.T) { func TestSubject_String_Good_Stringer(t *testing.T) {
// Use a type that implements fmt.Stringer // Use a type that implements fmt.Stringer
subj := S("error", fmt.Errorf("something broke")) subj := S("error", core.NewError("something broke"))
assert.Equal(t, "something broke", subj.String()) assert.Equal(t, "something broke", subj.String())
} }

View file

@ -4,7 +4,6 @@ package i18n
import ( import (
"context" "context"
"fmt"
"io/fs" "io/fs"
"sync" "sync"
@ -59,7 +58,7 @@ func NewCoreService(opts ServiceOptions) func(*core.Core) (any, error) {
if opts.Language != "" { if opts.Language != "" {
if langErr := svc.SetLanguage(opts.Language); langErr != nil { if langErr := svc.SetLanguage(opts.Language); langErr != nil {
return nil, fmt.Errorf("i18n: invalid language %q: %w", opts.Language, langErr) return nil, core.Wrap(langErr, "NewCoreService", core.Sprintf("i18n: invalid language %q", opts.Language))
} }
} }

7
go.mod
View file

@ -5,15 +5,12 @@ go 1.26.0
require golang.org/x/text v0.35.0 require golang.org/x/text v0.35.0
require ( require (
dappco.re/go/core v0.5.0 dappco.re/go/core v0.8.0-alpha.1
dappco.re/go/core/log v0.1.0 dappco.re/go/core/log v0.1.0
forge.lthn.ai/core/go-inference v0.1.4 forge.lthn.ai/core/go-inference v0.1.4
) )
require ( require github.com/kr/text v0.2.0 // indirect
forge.lthn.ai/core/go-log v0.0.4 // indirect
github.com/kr/text v0.2.0 // indirect
)
require ( require (
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect

4
go.sum
View file

@ -1,5 +1,5 @@
dappco.re/go/core v0.5.0 h1:P5DJoaCiK5Q+af5UiTdWqUIW4W4qYKzpgGK50thm21U= dappco.re/go/core v0.8.0-alpha.1 h1:gj7+Scv+L63Z7wMxbJYHhaRFkHJo2u4MMPuUSv/Dhtk=
dappco.re/go/core v0.5.0/go.mod h1:f2/tBZ3+3IqDrg2F5F598llv0nmb/4gJVCFzM5geE4A= dappco.re/go/core v0.8.0-alpha.1/go.mod h1:f2/tBZ3+3IqDrg2F5F598llv0nmb/4gJVCFzM5geE4A=
dappco.re/go/core/log v0.1.0 h1:pa71Vq2TD2aoEUQWFKwNcaJ3GBY8HbaNGqtE688Unyc= dappco.re/go/core/log v0.1.0 h1:pa71Vq2TD2aoEUQWFKwNcaJ3GBY8HbaNGqtE688Unyc=
dappco.re/go/core/log v0.1.0/go.mod h1:Nkqb8gsXhZAO8VLpx7B8i1iAmohhzqA20b9Zr8VUcJs= dappco.re/go/core/log v0.1.0/go.mod h1:Nkqb8gsXhZAO8VLpx7B8i1iAmohhzqA20b9Zr8VUcJs=
forge.lthn.ai/core/go-inference v0.1.4 h1:fuAgWbqsEDajHniqAKyvHYbRcBrkGEiGSqR2pfTMRY0= forge.lthn.ai/core/go-inference v0.1.4 h1:fuAgWbqsEDajHniqAKyvHYbRcBrkGEiGSqR2pfTMRY0=

View file

@ -2,9 +2,10 @@ package i18n
import ( import (
"maps" "maps"
"strings"
"text/template" "text/template"
"unicode" "unicode"
"dappco.re/go/core"
) )
// GetGrammarData returns the grammar data for the specified language. // GetGrammarData returns the grammar data for the specified language.
@ -55,7 +56,7 @@ func getVerbForm(lang, verb, form string) string {
if data == nil || data.Verbs == nil { if data == nil || data.Verbs == nil {
return "" return ""
} }
verb = strings.ToLower(verb) verb = core.Lower(verb)
if forms, ok := data.Verbs[verb]; ok { if forms, ok := data.Verbs[verb]; ok {
switch form { switch form {
case "past": case "past":
@ -72,7 +73,7 @@ func getWord(lang, word string) string {
if data == nil || data.Words == nil { if data == nil || data.Words == nil {
return "" return ""
} }
return data.Words[strings.ToLower(word)] return data.Words[core.Lower(word)]
} }
func getPunct(lang, rule, defaultVal string) string { func getPunct(lang, rule, defaultVal string) string {
@ -98,7 +99,7 @@ func getNounForm(lang, noun, form string) string {
if data == nil || data.Nouns == nil { if data == nil || data.Nouns == nil {
return "" return ""
} }
noun = strings.ToLower(noun) noun = core.Lower(noun)
if forms, ok := data.Nouns[noun]; ok { if forms, ok := data.Nouns[noun]; ok {
switch form { switch form {
case "one": case "one":
@ -126,7 +127,7 @@ func currentLangForGrammar() string {
// PastTense("run") // "ran" // PastTense("run") // "ran"
// PastTense("copy") // "copied" // PastTense("copy") // "copied"
func PastTense(verb string) string { func PastTense(verb string) string {
verb = strings.ToLower(strings.TrimSpace(verb)) verb = core.Lower(core.Trim(verb))
if verb == "" { if verb == "" {
return "" return ""
} }
@ -140,16 +141,16 @@ func PastTense(verb string) string {
} }
func applyRegularPastTense(verb string) string { func applyRegularPastTense(verb string) string {
if strings.HasSuffix(verb, "ed") && len(verb) > 2 { if core.HasSuffix(verb, "ed") && len(verb) > 2 {
thirdFromEnd := verb[len(verb)-3] thirdFromEnd := verb[len(verb)-3]
if !isVowel(rune(thirdFromEnd)) && thirdFromEnd != 'e' { if !isVowel(rune(thirdFromEnd)) && thirdFromEnd != 'e' {
return verb return verb
} }
} }
if strings.HasSuffix(verb, "e") { if core.HasSuffix(verb, "e") {
return verb + "d" return verb + "d"
} }
if strings.HasSuffix(verb, "y") && len(verb) > 1 { if core.HasSuffix(verb, "y") && len(verb) > 1 {
prev := rune(verb[len(verb)-2]) prev := rune(verb[len(verb)-2])
if !isVowel(prev) { if !isVowel(prev) {
return verb[:len(verb)-1] + "ied" return verb[:len(verb)-1] + "ied"
@ -189,7 +190,7 @@ func shouldDoubleConsonant(verb string) bool {
// Gerund("run") // "running" // Gerund("run") // "running"
// Gerund("die") // "dying" // Gerund("die") // "dying"
func Gerund(verb string) string { func Gerund(verb string) string {
verb = strings.ToLower(strings.TrimSpace(verb)) verb = core.Lower(core.Trim(verb))
if verb == "" { if verb == "" {
return "" return ""
} }
@ -203,10 +204,10 @@ func Gerund(verb string) string {
} }
func applyRegularGerund(verb string) string { func applyRegularGerund(verb string) string {
if strings.HasSuffix(verb, "ie") { if core.HasSuffix(verb, "ie") {
return verb[:len(verb)-2] + "ying" return verb[:len(verb)-2] + "ying"
} }
if strings.HasSuffix(verb, "e") && len(verb) > 1 { if core.HasSuffix(verb, "e") && len(verb) > 1 {
secondLast := rune(verb[len(verb)-2]) secondLast := rune(verb[len(verb)-2])
if secondLast != 'e' && secondLast != 'y' && secondLast != 'o' { if secondLast != 'e' && secondLast != 'y' && secondLast != 'o' {
return verb[:len(verb)-1] + "ing" return verb[:len(verb)-1] + "ing"
@ -232,20 +233,20 @@ func Pluralize(noun string, count int) string {
// PluralForm returns the plural form of a noun. // PluralForm returns the plural form of a noun.
func PluralForm(noun string) string { func PluralForm(noun string) string {
noun = strings.TrimSpace(noun) noun = core.Trim(noun)
if noun == "" { if noun == "" {
return "" return ""
} }
lower := strings.ToLower(noun) lower := core.Lower(noun)
if form := getNounForm(currentLangForGrammar(), lower, "other"); form != "" { if form := getNounForm(currentLangForGrammar(), lower, "other"); form != "" {
if unicode.IsUpper(rune(noun[0])) && len(form) > 0 { if unicode.IsUpper(rune(noun[0])) && len(form) > 0 {
return strings.ToUpper(string(form[0])) + form[1:] return core.Upper(string(form[0])) + form[1:]
} }
return form return form
} }
if plural, ok := irregularNouns[lower]; ok { if plural, ok := irregularNouns[lower]; ok {
if unicode.IsUpper(rune(noun[0])) { if unicode.IsUpper(rune(noun[0])) {
return strings.ToUpper(string(plural[0])) + plural[1:] return core.Upper(string(plural[0])) + plural[1:]
} }
return plural return plural
} }
@ -253,28 +254,28 @@ func PluralForm(noun string) string {
} }
func applyRegularPlural(noun string) string { func applyRegularPlural(noun string) string {
lower := strings.ToLower(noun) lower := core.Lower(noun)
if strings.HasSuffix(lower, "s") || if core.HasSuffix(lower, "s") ||
strings.HasSuffix(lower, "ss") || core.HasSuffix(lower, "ss") ||
strings.HasSuffix(lower, "sh") || core.HasSuffix(lower, "sh") ||
strings.HasSuffix(lower, "ch") || core.HasSuffix(lower, "ch") ||
strings.HasSuffix(lower, "x") || core.HasSuffix(lower, "x") ||
strings.HasSuffix(lower, "z") { core.HasSuffix(lower, "z") {
return noun + "es" return noun + "es"
} }
if strings.HasSuffix(lower, "y") && len(noun) > 1 { if core.HasSuffix(lower, "y") && len(noun) > 1 {
prev := rune(lower[len(lower)-2]) prev := rune(lower[len(lower)-2])
if !isVowel(prev) { if !isVowel(prev) {
return noun[:len(noun)-1] + "ies" return noun[:len(noun)-1] + "ies"
} }
} }
if strings.HasSuffix(lower, "f") { if core.HasSuffix(lower, "f") {
return noun[:len(noun)-1] + "ves" return noun[:len(noun)-1] + "ves"
} }
if strings.HasSuffix(lower, "fe") { if core.HasSuffix(lower, "fe") {
return noun[:len(noun)-2] + "ves" return noun[:len(noun)-2] + "ves"
} }
if strings.HasSuffix(lower, "o") && len(noun) > 1 { if core.HasSuffix(lower, "o") && len(noun) > 1 {
prev := rune(lower[len(lower)-2]) prev := rune(lower[len(lower)-2])
if !isVowel(prev) { if !isVowel(prev) {
if lower == "hero" || lower == "potato" || lower == "tomato" || lower == "echo" || lower == "veto" { if lower == "hero" || lower == "potato" || lower == "tomato" || lower == "echo" || lower == "veto" {
@ -295,14 +296,14 @@ func Article(word string) string {
if word == "" { if word == "" {
return "" return ""
} }
lower := strings.ToLower(strings.TrimSpace(word)) lower := core.Lower(core.Trim(word))
for key := range consonantSounds { for key := range consonantSounds {
if strings.HasPrefix(lower, key) { if core.HasPrefix(lower, key) {
return "a" return "a"
} }
} }
for key := range vowelSounds { for key := range vowelSounds {
if strings.HasPrefix(lower, key) { if core.HasPrefix(lower, key) {
return "an" return "an"
} }
} }
@ -322,7 +323,7 @@ func isVowel(r rune) bool {
// Title capitalises the first letter of each word. // Title capitalises the first letter of each word.
func Title(s string) string { func Title(s string) string {
var b strings.Builder b := core.NewBuilder()
b.Grow(len(s)) b.Grow(len(s))
prev := ' ' prev := ' '
for _, r := range s { for _, r := range s {
@ -345,8 +346,8 @@ func Quote(s string) string {
func TemplateFuncs() template.FuncMap { func TemplateFuncs() template.FuncMap {
return template.FuncMap{ return template.FuncMap{
"title": Title, "title": Title,
"lower": strings.ToLower, "lower": core.Lower,
"upper": strings.ToUpper, "upper": core.Upper,
"past": PastTense, "past": PastTense,
"gerund": Gerund, "gerund": Gerund,
"plural": Pluralize, "plural": Pluralize,

View file

@ -1,19 +1,18 @@
package i18n package i18n
import ( import (
"fmt" "dappco.re/go/core"
"strings"
) )
// LabelHandler handles i18n.label.{word} -> "Status:" patterns. // LabelHandler handles i18n.label.{word} -> "Status:" patterns.
type LabelHandler struct{} type LabelHandler struct{}
func (h LabelHandler) Match(key string) bool { func (h LabelHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.label.") return core.HasPrefix(key, "i18n.label.")
} }
func (h LabelHandler) Handle(key string, args []any, next func() string) string { func (h LabelHandler) Handle(key string, args []any, next func() string) string {
word := strings.TrimPrefix(key, "i18n.label.") word := core.TrimPrefix(key, "i18n.label.")
return Label(word) return Label(word)
} }
@ -21,11 +20,11 @@ func (h LabelHandler) Handle(key string, args []any, next func() string) string
type ProgressHandler struct{} type ProgressHandler struct{}
func (h ProgressHandler) Match(key string) bool { func (h ProgressHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.progress.") return core.HasPrefix(key, "i18n.progress.")
} }
func (h ProgressHandler) Handle(key string, args []any, next func() string) string { func (h ProgressHandler) Handle(key string, args []any, next func() string) string {
verb := strings.TrimPrefix(key, "i18n.progress.") verb := core.TrimPrefix(key, "i18n.progress.")
if len(args) > 0 { if len(args) > 0 {
if subj, ok := args[0].(string); ok { if subj, ok := args[0].(string); ok {
return ProgressSubject(verb, subj) return ProgressSubject(verb, subj)
@ -38,14 +37,14 @@ func (h ProgressHandler) Handle(key string, args []any, next func() string) stri
type CountHandler struct{} type CountHandler struct{}
func (h CountHandler) Match(key string) bool { func (h CountHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.count.") return core.HasPrefix(key, "i18n.count.")
} }
func (h CountHandler) Handle(key string, args []any, next func() string) string { func (h CountHandler) Handle(key string, args []any, next func() string) string {
noun := strings.TrimPrefix(key, "i18n.count.") noun := core.TrimPrefix(key, "i18n.count.")
if len(args) > 0 { if len(args) > 0 {
count := toInt(args[0]) count := toInt(args[0])
return fmt.Sprintf("%d %s", count, Pluralize(noun, count)) return core.Sprintf("%d %s", count, Pluralize(noun, count))
} }
return noun return noun
} }
@ -54,11 +53,11 @@ func (h CountHandler) Handle(key string, args []any, next func() string) string
type DoneHandler struct{} type DoneHandler struct{}
func (h DoneHandler) Match(key string) bool { func (h DoneHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.done.") return core.HasPrefix(key, "i18n.done.")
} }
func (h DoneHandler) Handle(key string, args []any, next func() string) string { func (h DoneHandler) Handle(key string, args []any, next func() string) string {
verb := strings.TrimPrefix(key, "i18n.done.") verb := core.TrimPrefix(key, "i18n.done.")
if len(args) > 0 { if len(args) > 0 {
if subj, ok := args[0].(string); ok { if subj, ok := args[0].(string); ok {
return ActionResult(verb, subj) return ActionResult(verb, subj)
@ -71,11 +70,11 @@ func (h DoneHandler) Handle(key string, args []any, next func() string) string {
type FailHandler struct{} type FailHandler struct{}
func (h FailHandler) Match(key string) bool { func (h FailHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.fail.") return core.HasPrefix(key, "i18n.fail.")
} }
func (h FailHandler) Handle(key string, args []any, next func() string) string { func (h FailHandler) Handle(key string, args []any, next func() string) string {
verb := strings.TrimPrefix(key, "i18n.fail.") verb := core.TrimPrefix(key, "i18n.fail.")
if len(args) > 0 { if len(args) > 0 {
if subj, ok := args[0].(string); ok { if subj, ok := args[0].(string); ok {
return ActionFailed(verb, subj) return ActionFailed(verb, subj)
@ -88,14 +87,14 @@ func (h FailHandler) Handle(key string, args []any, next func() string) string {
type NumericHandler struct{} type NumericHandler struct{}
func (h NumericHandler) Match(key string) bool { func (h NumericHandler) Match(key string) bool {
return strings.HasPrefix(key, "i18n.numeric.") return core.HasPrefix(key, "i18n.numeric.")
} }
func (h NumericHandler) Handle(key string, args []any, next func() string) string { func (h NumericHandler) Handle(key string, args []any, next func() string) string {
if len(args) == 0 { if len(args) == 0 {
return next() return next()
} }
format := strings.TrimPrefix(key, "i18n.numeric.") format := core.TrimPrefix(key, "i18n.numeric.")
switch format { switch format {
case "number", "int": case "number", "int":
return FormatNumber(toInt64(args[0])) return FormatNumber(toInt64(args[0]))

View file

@ -2,9 +2,9 @@ package i18n
import ( import (
"bytes" "bytes"
"errors"
"strings"
"text/template" "text/template"
"dappco.re/go/core"
) )
// T translates a message using the default service. // T translates a message using the default service.
@ -24,7 +24,7 @@ func Raw(messageID string, args ...any) string {
} }
// ErrServiceNotInitialised is returned when the service is not initialised. // ErrServiceNotInitialised is returned when the service is not initialised.
var ErrServiceNotInitialised = errors.New("i18n: service not initialised") var ErrServiceNotInitialised = core.NewError("i18n: service not initialised")
// ErrServiceNotInitialized is deprecated: use ErrServiceNotInitialised. // ErrServiceNotInitialized is deprecated: use ErrServiceNotInitialised.
var ErrServiceNotInitialized = ErrServiceNotInitialised var ErrServiceNotInitialized = ErrServiceNotInitialised
@ -109,7 +109,7 @@ func executeIntentTemplate(tmplStr string, data templateData) string {
} }
func applyTemplate(text string, data any) string { func applyTemplate(text string, data any) string {
if !strings.Contains(text, "{{") { if !core.Contains(text, "{{") {
return text return text
} }
if cached, ok := templateCache.Load(text); ok { if cached, ok := templateCache.Load(text); ok {

View file

@ -2,10 +2,10 @@ package integration
import ( import (
"context" "context"
"fmt"
"sort" "sort"
"testing" "testing"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n" i18n "dappco.re/go/core/i18n"
"forge.lthn.ai/core/go-inference" "forge.lthn.ai/core/go-inference"
_ "forge.lthn.ai/core/go-mlx" // registers Metal backend _ "forge.lthn.ai/core/go-mlx" // registers Metal backend
@ -549,7 +549,7 @@ func TestCalibrateDomains_1Bvs27B(t *testing.T) {
disagreements++ disagreements++
truth := "" truth := ""
if r.TrueDomain != "" { if r.TrueDomain != "" {
truth = fmt.Sprintf(" [truth=%s]", r.TrueDomain) truth = core.Sprintf(" [truth=%s]", r.TrueDomain)
} }
t.Logf(" DISAGREE: 1B=%s 27B=%s%s | %.60s", r.DomainA, r.DomainB, truth, r.Text) t.Logf(" DISAGREE: 1B=%s 27B=%s%s | %.60s", r.DomainA, r.DomainB, truth, r.Text)
if disagreements >= 50 { if disagreements >= 50 {

View file

@ -3,12 +3,10 @@ package integration
import ( import (
"bytes" "bytes"
"context" "context"
"encoding/json"
"fmt"
"strings"
"testing" "testing"
"time" "time"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n" i18n "dappco.re/go/core/i18n"
"forge.lthn.ai/core/go-inference" "forge.lthn.ai/core/go-inference"
_ "forge.lthn.ai/core/go-mlx" // registers Metal backend _ "forge.lthn.ai/core/go-mlx" // registers Metal backend
@ -24,9 +22,9 @@ func TestClassifyCorpus_Integration(t *testing.T) {
// Build 50 technical prompts for throughput measurement // Build 50 technical prompts for throughput measurement
var lines []string var lines []string
for i := 0; i < 50; i++ { for i := 0; i < 50; i++ {
lines = append(lines, fmt.Sprintf(`{"id":%d,"prompt":"Delete the configuration file and rebuild the project"}`, i)) lines = append(lines, core.Sprintf(`{"id":%d,"prompt":"Delete the configuration file and rebuild the project"}`, i))
} }
input := strings.NewReader(strings.Join(lines, "\n") + "\n") input := core.NewReader(core.Join("\n", lines...) + "\n")
var output bytes.Buffer var output bytes.Buffer
start := time.Now() start := time.Now()
@ -58,10 +56,10 @@ func TestClassifyCorpus_Integration(t *testing.T) {
} }
// Also inspect the output JSONL for misclassified entries // Also inspect the output JSONL for misclassified entries
outLines := strings.Split(strings.TrimSpace(output.String()), "\n") outLines := core.Split(core.Trim(output.String()), "\n")
for _, line := range outLines { for _, line := range outLines {
var record map[string]any var record map[string]any
if err := json.Unmarshal([]byte(line), &record); err == nil { if r := core.JSONUnmarshal([]byte(line), &record); r.OK {
if record["domain_1b"] != "technical" { if record["domain_1b"] != "technical" {
t.Logf(" misclassified: id=%v domain_1b=%v", record["id"], record["domain_1b"]) t.Logf(" misclassified: id=%v domain_1b=%v", record["id"], record["domain_1b"])
} }

View file

@ -1,12 +1,12 @@
package i18n package i18n
import ( import (
"encoding/json"
"io/fs" "io/fs"
"path" "path"
"strings" "strings"
"sync" "sync"
"dappco.re/go/core"
log "dappco.re/go/core/log" log "dappco.re/go/core/log"
) )
@ -29,8 +29,8 @@ func NewFSLoader(fsys fs.FS, dir string) *FSLoader {
func (l *FSLoader) Load(lang string) (map[string]Message, *GrammarData, error) { func (l *FSLoader) Load(lang string) (map[string]Message, *GrammarData, error) {
variants := []string{ variants := []string{
lang + ".json", lang + ".json",
strings.ReplaceAll(lang, "-", "_") + ".json", core.Replace(lang, "-", "_") + ".json",
strings.ReplaceAll(lang, "_", "-") + ".json", core.Replace(lang, "_", "-") + ".json",
} }
var data []byte var data []byte
@ -47,8 +47,8 @@ func (l *FSLoader) Load(lang string) (map[string]Message, *GrammarData, error) {
} }
var raw map[string]any var raw map[string]any
if err := json.Unmarshal(data, &raw); err != nil { if r := core.JSONUnmarshal(data, &raw); !r.OK {
return nil, nil, log.E("FSLoader.Load", "invalid JSON in locale: "+lang, err) return nil, nil, log.E("FSLoader.Load", "invalid JSON in locale: "+lang, r.Value.(error))
} }
messages := make(map[string]Message) messages := make(map[string]Message)
@ -72,11 +72,11 @@ func (l *FSLoader) Languages() []string {
return return
} }
for _, entry := range entries { for _, entry := range entries {
if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") { if entry.IsDir() || !core.HasSuffix(entry.Name(), ".json") {
continue continue
} }
lang := strings.TrimSuffix(entry.Name(), ".json") lang := core.TrimSuffix(entry.Name(), ".json")
lang = strings.ReplaceAll(lang, "_", "-") lang = core.Replace(lang, "_", "-")
l.languages = append(l.languages, lang) l.languages = append(l.languages, lang)
} }
}) })
@ -106,9 +106,9 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
switch v := value.(type) { switch v := value.(type) {
case string: case string:
if grammar != nil && strings.HasPrefix(fullKey, "gram.word.") { if grammar != nil && core.HasPrefix(fullKey, "gram.word.") {
wordKey := strings.TrimPrefix(fullKey, "gram.word.") wordKey := core.TrimPrefix(fullKey, "gram.word.")
grammar.Words[strings.ToLower(wordKey)] = v grammar.Words[core.Lower(wordKey)] = v
continue continue
} }
out[fullKey] = Message{Text: v} out[fullKey] = Message{Text: v}
@ -127,12 +127,12 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if gerund, ok := v["gerund"].(string); ok { if gerund, ok := v["gerund"].(string); ok {
forms.Gerund = gerund forms.Gerund = gerund
} }
grammar.Verbs[strings.ToLower(verbName)] = forms grammar.Verbs[core.Lower(verbName)] = forms
continue continue
} }
// Noun form object (under gram.noun.* or has gender field) // Noun form object (under gram.noun.* or has gender field)
if grammar != nil && (strings.HasPrefix(fullKey, "gram.noun.") || isNounFormObject(v)) { if grammar != nil && (core.HasPrefix(fullKey, "gram.noun.") || isNounFormObject(v)) {
nounName := key nounName := key
if after, ok := strings.CutPrefix(fullKey, "gram.noun."); ok { if after, ok := strings.CutPrefix(fullKey, "gram.noun."); ok {
nounName = after nounName = after
@ -150,7 +150,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if gender, ok := v["gender"].(string); ok { if gender, ok := v["gender"].(string); ok {
forms.Gender = gender forms.Gender = gender
} }
grammar.Nouns[strings.ToLower(nounName)] = forms grammar.Nouns[core.Lower(nounName)] = forms
continue continue
} }
} }
@ -161,7 +161,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if arr, ok := nd.([]any); ok { if arr, ok := nd.([]any); ok {
for _, item := range arr { for _, item := range arr {
if s, ok := item.(string); ok { if s, ok := item.(string); ok {
grammar.Signals.NounDeterminers = append(grammar.Signals.NounDeterminers, strings.ToLower(s)) grammar.Signals.NounDeterminers = append(grammar.Signals.NounDeterminers, core.Lower(s))
} }
} }
} }
@ -170,7 +170,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if arr, ok := va.([]any); ok { if arr, ok := va.([]any); ok {
for _, item := range arr { for _, item := range arr {
if s, ok := item.(string); ok { if s, ok := item.(string); ok {
grammar.Signals.VerbAuxiliaries = append(grammar.Signals.VerbAuxiliaries, strings.ToLower(s)) grammar.Signals.VerbAuxiliaries = append(grammar.Signals.VerbAuxiliaries, core.Lower(s))
} }
} }
} }
@ -179,7 +179,7 @@ func flattenWithGrammar(prefix string, data map[string]any, out map[string]Messa
if arr, ok := vi.([]any); ok { if arr, ok := vi.([]any); ok {
for _, item := range arr { for _, item := range arr {
if s, ok := item.(string); ok { if s, ok := item.(string); ok {
grammar.Signals.VerbInfinitive = append(grammar.Signals.VerbInfinitive, strings.ToLower(s)) grammar.Signals.VerbInfinitive = append(grammar.Signals.VerbInfinitive, core.Lower(s))
} }
} }
} }

View file

@ -2,8 +2,8 @@ package i18n
import ( import (
"os" "os"
"strings"
"dappco.re/go/core"
"golang.org/x/text/language" "golang.org/x/text/language"
) )
@ -95,8 +95,8 @@ func detectLanguage(supported []language.Tag) string {
if langEnv == "" { if langEnv == "" {
return "" return ""
} }
baseLang := strings.Split(langEnv, ".")[0] baseLang := core.Split(langEnv, ".")[0]
baseLang = strings.ReplaceAll(baseLang, "_", "-") baseLang = core.Replace(baseLang, "_", "-")
parsedLang, err := language.Parse(baseLang) parsedLang, err := language.Parse(baseLang)
if err != nil { if err != nil {
return "" return ""

View file

@ -1,15 +1,15 @@
package i18n package i18n
import ( import (
"fmt"
"math" "math"
"strconv" "strconv"
"strings"
"dappco.re/go/core"
) )
func getNumberFormat() NumberFormat { func getNumberFormat() NumberFormat {
lang := currentLangForGrammar() lang := currentLangForGrammar()
if idx := strings.IndexAny(lang, "-_"); idx > 0 { if idx := indexAny(lang, "-_"); idx > 0 {
lang = lang[:idx] lang = lang[:idx]
} }
if fmt, ok := numberFormats[lang]; ok { if fmt, ok := numberFormats[lang]; ok {
@ -42,8 +42,8 @@ func FormatDecimalN(f float64, decimals int) string {
if fracInt == 0 { if fracInt == 0 {
return intStr return intStr
} }
fracStr := fmt.Sprintf("%0*d", decimals, fracInt) fracStr := core.Sprintf("%0*d", decimals, fracInt)
fracStr = strings.TrimRight(fracStr, "0") fracStr = trimRight(fracStr, "0")
return intStr + nf.DecimalSep + fracStr return intStr + nf.DecimalSep + fracStr
} }
@ -57,7 +57,7 @@ func FormatPercent(f float64) string {
} else { } else {
numStr = FormatDecimalN(pct, 1) numStr = FormatDecimalN(pct, 1)
} }
return fmt.Sprintf(nf.PercentFmt, numStr) return core.Sprintf(nf.PercentFmt, numStr)
} }
// FormatBytes formats bytes as human-readable size. // FormatBytes formats bytes as human-readable size.
@ -85,31 +85,31 @@ func FormatBytes(bytes int64) string {
value = float64(bytes) / KB value = float64(bytes) / KB
unit = "KB" unit = "KB"
default: default:
return fmt.Sprintf("%d B", bytes) return core.Sprintf("%d B", bytes)
} }
intPart := int64(value) intPart := int64(value)
fracPart := value - float64(intPart) fracPart := value - float64(intPart)
if fracPart < 0.05 { if fracPart < 0.05 {
return fmt.Sprintf("%d %s", intPart, unit) return core.Sprintf("%d %s", intPart, unit)
} }
fracDigit := int(math.Round(fracPart * 10)) fracDigit := int(math.Round(fracPart * 10))
if fracDigit == 10 { if fracDigit == 10 {
return fmt.Sprintf("%d %s", intPart+1, unit) return core.Sprintf("%d %s", intPart+1, unit)
} }
return fmt.Sprintf("%d%s%d %s", intPart, nf.DecimalSep, fracDigit, unit) return core.Sprintf("%d%s%d %s", intPart, nf.DecimalSep, fracDigit, unit)
} }
// FormatOrdinal formats a number as an ordinal. // FormatOrdinal formats a number as an ordinal.
func FormatOrdinal(n int) string { func FormatOrdinal(n int) string {
lang := currentLangForGrammar() lang := currentLangForGrammar()
if idx := strings.IndexAny(lang, "-_"); idx > 0 { if idx := indexAny(lang, "-_"); idx > 0 {
lang = lang[:idx] lang = lang[:idx]
} }
switch lang { switch lang {
case "en": case "en":
return formatEnglishOrdinal(n) return formatEnglishOrdinal(n)
default: default:
return fmt.Sprintf("%d.", n) return core.Sprintf("%d.", n)
} }
} }
@ -119,17 +119,17 @@ func formatEnglishOrdinal(n int) string {
abs = -abs abs = -abs
} }
if abs%100 >= 11 && abs%100 <= 13 { if abs%100 >= 11 && abs%100 <= 13 {
return fmt.Sprintf("%dth", n) return core.Sprintf("%dth", n)
} }
switch abs % 10 { switch abs % 10 {
case 1: case 1:
return fmt.Sprintf("%dst", n) return core.Sprintf("%dst", n)
case 2: case 2:
return fmt.Sprintf("%dnd", n) return core.Sprintf("%dnd", n)
case 3: case 3:
return fmt.Sprintf("%drd", n) return core.Sprintf("%drd", n)
default: default:
return fmt.Sprintf("%dth", n) return core.Sprintf("%dth", n)
} }
} }
@ -148,7 +148,7 @@ func formatIntWithSep(n int64, sep string) string {
} }
return str return str
} }
var result strings.Builder result := core.NewBuilder()
for i, c := range str { for i, c := range str {
if i > 0 && (len(str)-i)%3 == 0 { if i > 0 && (len(str)-i)%3 == 0 {
result.WriteString(sep) result.WriteString(sep)
@ -160,3 +160,34 @@ func formatIntWithSep(n int64, sep string) string {
} }
return result.String() return result.String()
} }
// indexAny returns the byte index of the first rune in s that also occurs
// in chars, or -1 if s contains none of the runes in chars.
// It mirrors strings.IndexAny for the lang-tag splitting done in this file.
func indexAny(s, chars string) int {
	match := func(r rune) bool {
		for _, c := range chars {
			if c == r {
				return true
			}
		}
		return false
	}
	for i, r := range s {
		if match(r) {
			return i
		}
	}
	return -1
}
// trimRight returns s with all trailing runes that appear in cutset removed,
// mirroring strings.TrimRight.
//
// The previous implementation compared rune(s[len(s)-1]) — a single byte —
// against the runes of cutset, so multi-byte runes in cutset could never
// match and a multi-byte final rune in s was inspected via its UTF-8
// continuation byte. Decoding s into runes fixes both while keeping the
// behavior for ASCII cutsets (the only in-file use, trimRight(fracStr, "0"))
// identical. When nothing is trimmed, s is returned unchanged (no copy).
func trimRight(s, cutset string) string {
	if s == "" || cutset == "" {
		return s
	}
	inCutset := func(r rune) bool {
		for _, c := range cutset {
			if c == r {
				return true
			}
		}
		return false
	}
	rs := []rune(s)
	end := len(rs)
	for end > 0 && inCutset(rs[end-1]) {
		end--
	}
	if end == len(rs) {
		return s
	}
	return string(rs[:end])
}

View file

@ -1,10 +1,10 @@
package reversal package reversal
import ( import (
"fmt"
"sort" "sort"
"strings"
"testing" "testing"
"dappco.re/go/core"
) )
// Domain categories for classification ground truth. // Domain categories for classification ground truth.
@ -418,17 +418,17 @@ func TestClassification_LeaveOneOut(t *testing.T) {
// Print confusion matrix // Print confusion matrix
t.Log("\nConfusion matrix (rows=actual, cols=predicted):") t.Log("\nConfusion matrix (rows=actual, cols=predicted):")
var header strings.Builder header := core.NewBuilder()
header.WriteString(fmt.Sprintf(" %-10s", "")) header.WriteString(core.Sprintf(" %-10s", ""))
for _, d := range domains { for _, d := range domains {
header.WriteString(fmt.Sprintf(" %10s", d[:4])) header.WriteString(core.Sprintf(" %10s", d[:4]))
} }
t.Log(header.String()) t.Log(header.String())
for _, actual := range domains { for _, actual := range domains {
var row strings.Builder row := core.NewBuilder()
row.WriteString(fmt.Sprintf(" %-10s", actual[:4])) row.WriteString(core.Sprintf(" %-10s", actual[:4]))
for _, predicted := range domains { for _, predicted := range domains {
row.WriteString(fmt.Sprintf(" %10d", confusion[actual][predicted])) row.WriteString(core.Sprintf(" %10d", confusion[actual][predicted]))
} }
t.Log(row.String()) t.Log(row.String())
} }
@ -506,14 +506,14 @@ func TestClassification_TenseProfile(t *testing.T) {
} }
} }
var parts strings.Builder parts := core.NewBuilder()
parts.WriteString(fmt.Sprintf("%-10s verbs=%d", d, totalVerbs)) parts.WriteString(core.Sprintf("%-10s verbs=%d", d, totalVerbs))
for _, tense := range tenses { for _, tense := range tenses {
pct := 0.0 pct := 0.0
if totalVerbs > 0 { if totalVerbs > 0 {
pct = float64(tenseCounts[tense]) / float64(totalVerbs) * 100 pct = float64(tenseCounts[tense]) / float64(totalVerbs) * 100
} }
parts.WriteString(fmt.Sprintf(" %s=%.0f%%", tense, pct)) parts.WriteString(core.Sprintf(" %s=%.0f%%", tense, pct))
} }
t.Log(parts.String()) t.Log(parts.String())
} }
@ -550,12 +550,12 @@ func TestClassification_TopVerbs(t *testing.T) {
sort.Slice(sorted, func(i, j int) bool { return sorted[i].count > sorted[j].count }) sort.Slice(sorted, func(i, j int) bool { return sorted[i].count > sorted[j].count })
top := min(len(sorted), 8) top := min(len(sorted), 8)
var verbs strings.Builder verbs := core.NewBuilder()
for i := 0; i < top; i++ { for i := 0; i < top; i++ {
if i > 0 { if i > 0 {
verbs.WriteString(", ") verbs.WriteString(", ")
} }
verbs.WriteString(fmt.Sprintf("%s(%d)", sorted[i].verb, sorted[i].count)) verbs.WriteString(core.Sprintf("%s(%d)", sorted[i].verb, sorted[i].count))
} }
t.Logf("%-10s unique=%d top: %s", d, len(verbCounts), verbs.String()) t.Logf("%-10s unique=%d top: %s", d, len(verbCounts), verbs.String())
} }

View file

@ -1,9 +1,9 @@
package reversal package reversal
import ( import (
"strings"
"unicode" "unicode"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n" i18n "dappco.re/go/core/i18n"
) )
@ -26,7 +26,7 @@ func NewMultiplierForLang(lang string) *Multiplier {
// Expand produces: original + tense flips (past, gerund) + number flips (plural toggle) + combinations. // Expand produces: original + tense flips (past, gerund) + number flips (plural toggle) + combinations.
// All output is deterministic and grammatically correct. // All output is deterministic and grammatically correct.
func (m *Multiplier) Expand(text string) []string { func (m *Multiplier) Expand(text string) []string {
text = strings.TrimSpace(text) text = core.Trim(text)
if text == "" { if text == "" {
return nil return nil
} }
@ -140,7 +140,7 @@ func (m *Multiplier) applyVerbTransform(tokens []Token, vi int, targetTense stri
result[vi] = Token{ result[vi] = Token{
Raw: newForm, Raw: newForm,
Lower: strings.ToLower(newForm), Lower: core.Lower(newForm),
Type: TokenVerb, Type: TokenVerb,
Confidence: 1.0, Confidence: 1.0,
VerbInfo: VerbMatch{ VerbInfo: VerbMatch{
@ -191,7 +191,7 @@ func (m *Multiplier) applyNounTransformOnTokens(tokens []Token, ni int) []Token
result[ni] = Token{ result[ni] = Token{
Raw: newForm, Raw: newForm,
Lower: strings.ToLower(newForm), Lower: core.Lower(newForm),
Type: TokenNoun, Type: TokenNoun,
Confidence: 1.0, Confidence: 1.0,
NounInfo: NounMatch{ NounInfo: NounMatch{
@ -206,7 +206,7 @@ func (m *Multiplier) applyNounTransformOnTokens(tokens []Token, ni int) []Token
// reconstruct joins tokens back into a string, preserving spacing. // reconstruct joins tokens back into a string, preserving spacing.
func reconstruct(tokens []Token) string { func reconstruct(tokens []Token) string {
var b strings.Builder b := core.NewBuilder()
for i, tok := range tokens { for i, tok := range tokens {
if i > 0 { if i > 0 {
// Punctuation tokens that were split from the previous word // Punctuation tokens that were split from the previous word
@ -235,7 +235,7 @@ func preserveCase(original, replacement string) string {
// If the original is all uppercase (like "DELETE"), make replacement all uppercase. // If the original is all uppercase (like "DELETE"), make replacement all uppercase.
if isAllUpper(original) && len(original) > 1 { if isAllUpper(original) && len(original) > 1 {
return strings.ToUpper(replacement) return core.Upper(replacement)
} }
// If the first character of the original is uppercase, capitalise the replacement. // If the first character of the original is uppercase, capitalise the replacement.

View file

@ -18,6 +18,7 @@ package reversal
import ( import (
"strings" "strings"
"dappco.re/go/core"
i18n "dappco.re/go/core/i18n" i18n "dappco.re/go/core/i18n"
) )
@ -209,7 +210,7 @@ func (t *Tokeniser) buildNounIndex() {
// Tier 3: Try reverse morphology rules and round-trip verify via // Tier 3: Try reverse morphology rules and round-trip verify via
// the forward function PluralForm(). // the forward function PluralForm().
func (t *Tokeniser) MatchNoun(word string) (NounMatch, bool) { func (t *Tokeniser) MatchNoun(word string) (NounMatch, bool) {
word = strings.ToLower(strings.TrimSpace(word)) word = core.Lower(core.Trim(word))
if word == "" { if word == "" {
return NounMatch{}, false return NounMatch{}, false
} }
@ -250,27 +251,27 @@ func (t *Tokeniser) reverseRegularPlural(word string) []string {
var candidates []string var candidates []string
// Rule: consonant + "ies" → consonant + "y" (e.g., "entries" → "entry") // Rule: consonant + "ies" → consonant + "y" (e.g., "entries" → "entry")
if strings.HasSuffix(word, "ies") && len(word) > 3 { if core.HasSuffix(word, "ies") && len(word) > 3 {
base := word[:len(word)-3] + "y" base := word[:len(word)-3] + "y"
candidates = append(candidates, base) candidates = append(candidates, base)
} }
// Rule: "ves" → "f" or "fe" (e.g., "wolves" → "wolf", "knives" → "knife") // Rule: "ves" → "f" or "fe" (e.g., "wolves" → "wolf", "knives" → "knife")
if strings.HasSuffix(word, "ves") && len(word) > 3 { if core.HasSuffix(word, "ves") && len(word) > 3 {
candidates = append(candidates, word[:len(word)-3]+"f") candidates = append(candidates, word[:len(word)-3]+"f")
candidates = append(candidates, word[:len(word)-3]+"fe") candidates = append(candidates, word[:len(word)-3]+"fe")
} }
// Rule: sibilant + "es" (e.g., "processes" → "process", "branches" → "branch") // Rule: sibilant + "es" (e.g., "processes" → "process", "branches" → "branch")
if strings.HasSuffix(word, "ses") || strings.HasSuffix(word, "xes") || if core.HasSuffix(word, "ses") || core.HasSuffix(word, "xes") ||
strings.HasSuffix(word, "zes") || strings.HasSuffix(word, "ches") || core.HasSuffix(word, "zes") || core.HasSuffix(word, "ches") ||
strings.HasSuffix(word, "shes") { core.HasSuffix(word, "shes") {
base := word[:len(word)-2] // strip "es" base := word[:len(word)-2] // strip "es"
candidates = append(candidates, base) candidates = append(candidates, base)
} }
// Rule: drop "s" (e.g., "servers" → "server") // Rule: drop "s" (e.g., "servers" → "server")
if strings.HasSuffix(word, "s") && len(word) > 1 { if core.HasSuffix(word, "s") && len(word) > 1 {
base := word[:len(word)-1] base := word[:len(word)-1]
candidates = append(candidates, base) candidates = append(candidates, base)
} }
@ -285,7 +286,7 @@ func (t *Tokeniser) reverseRegularPlural(word string) []string {
// Tier 3: Try reverse morphology rules and round-trip verify via // Tier 3: Try reverse morphology rules and round-trip verify via
// the forward functions PastTense() and Gerund(). // the forward functions PastTense() and Gerund().
func (t *Tokeniser) MatchVerb(word string) (VerbMatch, bool) { func (t *Tokeniser) MatchVerb(word string) (VerbMatch, bool) {
word = strings.ToLower(strings.TrimSpace(word)) word = core.Lower(core.Trim(word))
if word == "" { if word == "" {
return VerbMatch{}, false return VerbMatch{}, false
} }
@ -358,7 +359,7 @@ func (t *Tokeniser) bestRoundTrip(target string, candidates []string, forward fu
// Priority 3: prefer candidate not ending in "e" (avoids phantom verbs // Priority 3: prefer candidate not ending in "e" (avoids phantom verbs
// with CCe endings like "walke", "processe") // with CCe endings like "walke", "processe")
for _, m := range matches { for _, m := range matches {
if !strings.HasSuffix(m, "e") { if !core.HasSuffix(m, "e") {
return m return m
} }
} }
@ -402,12 +403,12 @@ func isVowelByte(b byte) bool {
func (t *Tokeniser) reverseRegularPast(word string) []string { func (t *Tokeniser) reverseRegularPast(word string) []string {
var candidates []string var candidates []string
if !strings.HasSuffix(word, "ed") { if !core.HasSuffix(word, "ed") {
return candidates return candidates
} }
// Rule: consonant + "ied" → consonant + "y" (e.g., "copied" → "copy") // Rule: consonant + "ied" → consonant + "y" (e.g., "copied" → "copy")
if strings.HasSuffix(word, "ied") && len(word) > 3 { if core.HasSuffix(word, "ied") && len(word) > 3 {
base := word[:len(word)-3] + "y" base := word[:len(word)-3] + "y"
candidates = append(candidates, base) candidates = append(candidates, base)
} }
@ -448,14 +449,14 @@ func (t *Tokeniser) reverseRegularPast(word string) []string {
func (t *Tokeniser) reverseRegularGerund(word string) []string { func (t *Tokeniser) reverseRegularGerund(word string) []string {
var candidates []string var candidates []string
if !strings.HasSuffix(word, "ing") || len(word) < 4 { if !core.HasSuffix(word, "ing") || len(word) < 4 {
return candidates return candidates
} }
stem := word[:len(word)-3] // strip "ing" stem := word[:len(word)-3] // strip "ing"
// Rule: "ying" → "ie" (e.g., "dying" → "die") // Rule: "ying" → "ie" (e.g., "dying" → "die")
if strings.HasSuffix(word, "ying") && len(word) > 4 { if core.HasSuffix(word, "ying") && len(word) > 4 {
base := word[:len(word)-4] + "ie" base := word[:len(word)-4] + "ie"
candidates = append(candidates, base) candidates = append(candidates, base)
} }
@ -488,15 +489,15 @@ func (t *Tokeniser) buildWordIndex() {
} }
for key, display := range data.Words { for key, display := range data.Words {
// Map the key itself (already lowercase) // Map the key itself (already lowercase)
t.words[strings.ToLower(key)] = key t.words[core.Lower(key)] = key
// Map the display form (e.g., "URL" → "url", "SSH" → "ssh") // Map the display form (e.g., "URL" → "url", "SSH" → "ssh")
t.words[strings.ToLower(display)] = key t.words[core.Lower(display)] = key
} }
} }
// IsDualClass returns true if the word exists in both verb and noun tables. // IsDualClass returns true if the word exists in both verb and noun tables.
func (t *Tokeniser) IsDualClass(word string) bool { func (t *Tokeniser) IsDualClass(word string) bool {
return t.dualClass[strings.ToLower(word)] return t.dualClass[core.Lower(word)]
} }
func (t *Tokeniser) buildDualClassIndex() { func (t *Tokeniser) buildDualClassIndex() {
@ -519,7 +520,7 @@ func (t *Tokeniser) buildSignalIndex() {
// falls back per-field rather than silently disabling signals. // falls back per-field rather than silently disabling signals.
if data != nil && len(data.Signals.NounDeterminers) > 0 { if data != nil && len(data.Signals.NounDeterminers) > 0 {
for _, w := range data.Signals.NounDeterminers { for _, w := range data.Signals.NounDeterminers {
t.nounDet[strings.ToLower(w)] = true t.nounDet[core.Lower(w)] = true
} }
} else { } else {
for _, w := range []string{ for _, w := range []string{
@ -534,7 +535,7 @@ func (t *Tokeniser) buildSignalIndex() {
if data != nil && len(data.Signals.VerbAuxiliaries) > 0 { if data != nil && len(data.Signals.VerbAuxiliaries) > 0 {
for _, w := range data.Signals.VerbAuxiliaries { for _, w := range data.Signals.VerbAuxiliaries {
t.verbAux[strings.ToLower(w)] = true t.verbAux[core.Lower(w)] = true
} }
} else { } else {
for _, w := range []string{ for _, w := range []string{
@ -548,7 +549,7 @@ func (t *Tokeniser) buildSignalIndex() {
if data != nil && len(data.Signals.VerbInfinitive) > 0 { if data != nil && len(data.Signals.VerbInfinitive) > 0 {
for _, w := range data.Signals.VerbInfinitive { for _, w := range data.Signals.VerbInfinitive {
t.verbInf[strings.ToLower(w)] = true t.verbInf[core.Lower(w)] = true
} }
} else { } else {
t.verbInf["to"] = true t.verbInf["to"] = true
@ -570,7 +571,7 @@ func defaultWeights() map[string]float64 {
// MatchWord performs a case-insensitive lookup in the words map. // MatchWord performs a case-insensitive lookup in the words map.
// Returns the category key and true if found, or ("", false) otherwise. // Returns the category key and true if found, or ("", false) otherwise.
func (t *Tokeniser) MatchWord(word string) (string, bool) { func (t *Tokeniser) MatchWord(word string) (string, bool) {
cat, ok := t.words[strings.ToLower(word)] cat, ok := t.words[core.Lower(word)]
return cat, ok return cat, ok
} }
@ -583,13 +584,13 @@ func (t *Tokeniser) MatchArticle(word string) (string, bool) {
return "", false return "", false
} }
lower := strings.ToLower(word) lower := core.Lower(word)
if lower == strings.ToLower(data.Articles.IndefiniteDefault) || if lower == core.Lower(data.Articles.IndefiniteDefault) ||
lower == strings.ToLower(data.Articles.IndefiniteVowel) { lower == core.Lower(data.Articles.IndefiniteVowel) {
return "indefinite", true return "indefinite", true
} }
if lower == strings.ToLower(data.Articles.Definite) { if lower == core.Lower(data.Articles.Definite) {
return "definite", true return "definite", true
} }
@ -613,7 +614,7 @@ var clauseBoundaries = map[string]bool{
// Pass 1 classifies unambiguous tokens and marks dual-class base forms. // Pass 1 classifies unambiguous tokens and marks dual-class base forms.
// Pass 2 resolves ambiguous tokens using weighted disambiguation signals. // Pass 2 resolves ambiguous tokens using weighted disambiguation signals.
func (t *Tokeniser) Tokenise(text string) []Token { func (t *Tokeniser) Tokenise(text string) []Token {
text = strings.TrimSpace(text) text = core.Trim(text)
if text == "" { if text == "" {
return nil return nil
} }
@ -628,7 +629,7 @@ func (t *Tokeniser) Tokenise(text string) []Token {
// Classify the word portion (if any). // Classify the word portion (if any).
if word != "" { if word != "" {
tok := Token{Raw: raw, Lower: strings.ToLower(word)} tok := Token{Raw: raw, Lower: core.Lower(word)}
if artType, ok := t.MatchArticle(word); ok { if artType, ok := t.MatchArticle(word); ok {
// Articles are unambiguous. // Articles are unambiguous.
@ -943,7 +944,7 @@ func splitTrailingPunct(s string) (string, string) {
} }
// Check for "..." suffix first (3-char pattern). // Check for "..." suffix first (3-char pattern).
if strings.HasSuffix(s, "...") { if core.HasSuffix(s, "...") {
return s[:len(s)-3], "..." return s[:len(s)-3], "..."
} }
// Check single-char trailing punctuation. // Check single-char trailing punctuation.

View file

@ -2,16 +2,14 @@ package i18n
import ( import (
"embed" "embed"
"encoding/json"
"fmt"
"io/fs" "io/fs"
"maps" "maps"
"path" "path"
"slices" "slices"
"strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
"dappco.re/go/core"
log "dappco.re/go/core/log" log "dappco.re/go/core/log"
"golang.org/x/text/language" "golang.org/x/text/language"
) )
@ -185,8 +183,8 @@ func AddLoader(loader Loader) {
func (s *Service) loadJSON(lang string, data []byte) error { func (s *Service) loadJSON(lang string, data []byte) error {
var raw map[string]any var raw map[string]any
if err := json.Unmarshal(data, &raw); err != nil { if r := core.JSONUnmarshal(data, &raw); !r.OK {
return err return r.Value.(error)
} }
messages := make(map[string]Message) messages := make(map[string]Message)
grammarData := &GrammarData{ grammarData := &GrammarData{
@ -324,8 +322,8 @@ func (s *Service) resolveWithFallback(messageID string, data any) string {
if text := s.tryResolve(s.fallbackLang, messageID, data); text != "" { if text := s.tryResolve(s.fallbackLang, messageID, data); text != "" {
return text return text
} }
if strings.Contains(messageID, ".") { if core.Contains(messageID, ".") {
parts := strings.Split(messageID, ".") parts := core.Split(messageID, ".")
verb := parts[len(parts)-1] verb := parts[len(parts)-1]
commonKey := "common.action." + verb commonKey := "common.action." + verb
if text := s.tryResolve(s.currentLang, commonKey, data); text != "" { if text := s.tryResolve(s.currentLang, commonKey, data); text != "" {
@ -394,7 +392,7 @@ func (s *Service) getEffectiveFormality(data any) Formality {
return f return f
} }
case string: case string:
switch strings.ToLower(f) { switch core.Lower(f) {
case "formal": case "formal":
return FormalityFormal return FormalityFormal
case "informal": case "informal":
@ -408,7 +406,7 @@ func (s *Service) getEffectiveFormality(data any) Formality {
func (s *Service) handleMissingKey(key string, args []any) string { func (s *Service) handleMissingKey(key string, args []any) string {
switch s.mode { switch s.mode {
case ModeStrict: case ModeStrict:
panic(fmt.Sprintf("i18n: missing translation key %q", key)) panic(core.Sprintf("i18n: missing translation key %q", key))
case ModeCollect: case ModeCollect:
var argsMap map[string]any var argsMap map[string]any
if len(args) > 0 { if len(args) > 0 {
@ -506,7 +504,7 @@ func (s *Service) LoadFS(fsys fs.FS, dir string) error {
return log.E("Service.LoadFS", "read locales directory", err) return log.E("Service.LoadFS", "read locales directory", err)
} }
for _, entry := range entries { for _, entry := range entries {
if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".json") { if entry.IsDir() || !core.HasSuffix(entry.Name(), ".json") {
continue continue
} }
filePath := path.Join(dir, entry.Name()) filePath := path.Join(dir, entry.Name())
@ -514,8 +512,8 @@ func (s *Service) LoadFS(fsys fs.FS, dir string) error {
if err != nil { if err != nil {
return log.E("Service.LoadFS", "read locale: "+entry.Name(), err) return log.E("Service.LoadFS", "read locale: "+entry.Name(), err)
} }
lang := strings.TrimSuffix(entry.Name(), ".json") lang := core.TrimSuffix(entry.Name(), ".json")
lang = strings.ReplaceAll(lang, "_", "-") lang = core.Replace(lang, "_", "-")
if err := s.loadJSON(lang, data); err != nil { if err := s.loadJSON(lang, data); err != nil {
return log.E("Service.LoadFS", "parse locale: "+entry.Name(), err) return log.E("Service.LoadFS", "parse locale: "+entry.Name(), err)
} }

View file

@ -1,8 +1,9 @@
package i18n package i18n
import ( import (
"fmt"
"time" "time"
"dappco.re/go/core"
) )
// TimeAgo returns a localised relative time string. // TimeAgo returns a localised relative time string.
@ -28,12 +29,12 @@ func TimeAgo(t time.Time) string {
func FormatAgo(count int, unit string) string { func FormatAgo(count int, unit string) string {
svc := Default() svc := Default()
if svc == nil { if svc == nil {
return fmt.Sprintf("%d %ss ago", count, unit) return core.Sprintf("%d %ss ago", count, unit)
} }
key := "time.ago." + unit key := "time.ago." + unit
result := svc.T(key, map[string]any{"Count": count}) result := svc.T(key, map[string]any{"Count": count})
if result == key { if result == key {
return fmt.Sprintf("%d %s ago", count, Pluralize(unit, count)) return core.Sprintf("%d %s ago", count, Pluralize(unit, count))
} }
return result return result
} }

View file

@ -4,9 +4,8 @@ package i18n
import ( import (
"context" "context"
"fmt"
"strings"
"dappco.re/go/core"
log "dappco.re/go/core/log" log "dappco.re/go/core/log"
"forge.lthn.ai/core/go-inference" "forge.lthn.ai/core/go-inference"
) )
@ -45,7 +44,7 @@ type IrregularResult struct {
// articlePrompt builds a fill-in-the-blank prompt for article prediction. // articlePrompt builds a fill-in-the-blank prompt for article prediction.
func articlePrompt(noun string) string { func articlePrompt(noun string) string {
return fmt.Sprintf( return core.Sprintf(
"Complete with the correct article (a/an/the): ___ %s. Answer with just the article:", "Complete with the correct article (a/an/the): ___ %s. Answer with just the article:",
noun, noun,
) )
@ -53,7 +52,7 @@ func articlePrompt(noun string) string {
// irregularPrompt builds a fill-in-the-blank prompt for irregular verb prediction. // irregularPrompt builds a fill-in-the-blank prompt for irregular verb prediction.
func irregularPrompt(verb, tense string) string { func irregularPrompt(verb, tense string) string {
return fmt.Sprintf( return core.Sprintf(
"What is the %s form of the verb '%s'? Answer with just the word:", "What is the %s form of the verb '%s'? Answer with just the word:",
tense, verb, tense, verb,
) )
@ -61,14 +60,14 @@ func irregularPrompt(verb, tense string) string {
// collectGenerated runs a single-token generation and returns the trimmed, lowercased output. // collectGenerated runs a single-token generation and returns the trimmed, lowercased output.
func collectGenerated(ctx context.Context, m inference.TextModel, prompt string) (string, error) { func collectGenerated(ctx context.Context, m inference.TextModel, prompt string) (string, error) {
var sb strings.Builder sb := core.NewBuilder()
for tok := range m.Generate(ctx, prompt, inference.WithMaxTokens(1), inference.WithTemperature(0.05)) { for tok := range m.Generate(ctx, prompt, inference.WithMaxTokens(1), inference.WithTemperature(0.05)) {
sb.WriteString(tok.Text) sb.WriteString(tok.Text)
} }
if err := m.Err(); err != nil { if err := m.Err(); err != nil {
return "", err return "", err
} }
return strings.TrimSpace(strings.ToLower(sb.String())), nil return core.Trim(core.Lower(sb.String())), nil
} }
// ValidateArticle checks whether a given article usage is grammatically correct // ValidateArticle checks whether a given article usage is grammatically correct
@ -80,7 +79,7 @@ func ValidateArticle(ctx context.Context, m inference.TextModel, noun string, ar
if err != nil { if err != nil {
return ArticleResult{}, log.E("ValidateArticle", "validate: "+noun, err) return ArticleResult{}, log.E("ValidateArticle", "validate: "+noun, err)
} }
given := strings.TrimSpace(strings.ToLower(article)) given := core.Trim(core.Lower(article))
return ArticleResult{ return ArticleResult{
Noun: noun, Noun: noun,
Given: given, Given: given,
@ -99,7 +98,7 @@ func ValidateIrregular(ctx context.Context, m inference.TextModel, verb string,
if err != nil { if err != nil {
return IrregularResult{}, log.E("ValidateIrregular", "validate: "+verb+" ("+tense+")", err) return IrregularResult{}, log.E("ValidateIrregular", "validate: "+verb+" ("+tense+")", err)
} }
given := strings.TrimSpace(strings.ToLower(form)) given := core.Trim(core.Lower(form))
return IrregularResult{ return IrregularResult{
Verb: verb, Verb: verb,
Tense: tense, Tense: tense,

View file

@ -4,10 +4,10 @@ package i18n
import ( import (
"context" "context"
"fmt"
"iter" "iter"
"testing" "testing"
"dappco.re/go/core"
"forge.lthn.ai/core/go-inference" "forge.lthn.ai/core/go-inference"
) )
@ -73,7 +73,7 @@ func newMockIrregularModel(forms map[string]string) *mockGenerateModel {
// containsVerb checks if the prompt contains the verb in the expected format. // containsVerb checks if the prompt contains the verb in the expected format.
func containsVerb(prompt, verb string) bool { func containsVerb(prompt, verb string) bool {
return len(prompt) > 0 && len(verb) > 0 && return len(prompt) > 0 && len(verb) > 0 &&
contains(prompt, fmt.Sprintf("'%s'", verb)) contains(prompt, core.Sprintf("'%s'", verb))
} }
// contains is a simple substring check (avoids importing strings in test). // contains is a simple substring check (avoids importing strings in test).