[agent/codex:gpt-5.4-mini] Read ~/spec/code/core/go/i18n/RFC.md fully. Find ONE feature... #15
3 changed files with 283 additions and 5 deletions
108
grammar.go
108
grammar.go
|
|
@ -32,12 +32,116 @@ func MergeGrammarData(lang string, data *GrammarData) {
|
|||
grammarCache[lang] = data
|
||||
return
|
||||
}
|
||||
if existing.Verbs == nil {
|
||||
existing.Verbs = make(map[string]VerbForms, len(data.Verbs))
|
||||
}
|
||||
if existing.Nouns == nil {
|
||||
existing.Nouns = make(map[string]NounForms, len(data.Nouns))
|
||||
}
|
||||
if existing.Words == nil {
|
||||
existing.Words = make(map[string]string, len(data.Words))
|
||||
}
|
||||
maps.Copy(existing.Verbs, data.Verbs)
|
||||
maps.Copy(existing.Nouns, data.Nouns)
|
||||
maps.Copy(existing.Words, data.Words)
|
||||
if data.Number != (NumberFormat{}) {
|
||||
existing.Number = data.Number
|
||||
mergeArticleForms(&existing.Articles, data.Articles)
|
||||
mergePunctuationRules(&existing.Punct, data.Punct)
|
||||
mergeSignalData(&existing.Signals, data.Signals)
|
||||
if data.Number.ThousandsSep != "" {
|
||||
existing.Number.ThousandsSep = data.Number.ThousandsSep
|
||||
}
|
||||
if data.Number.DecimalSep != "" {
|
||||
existing.Number.DecimalSep = data.Number.DecimalSep
|
||||
}
|
||||
if data.Number.PercentFmt != "" {
|
||||
existing.Number.PercentFmt = data.Number.PercentFmt
|
||||
}
|
||||
}
|
||||
|
||||
func mergeArticleForms(dst *ArticleForms, src ArticleForms) {
|
||||
if dst == nil {
|
||||
return
|
||||
}
|
||||
if src.IndefiniteDefault != "" {
|
||||
dst.IndefiniteDefault = src.IndefiniteDefault
|
||||
}
|
||||
if src.IndefiniteVowel != "" {
|
||||
dst.IndefiniteVowel = src.IndefiniteVowel
|
||||
}
|
||||
if src.Definite != "" {
|
||||
dst.Definite = src.Definite
|
||||
}
|
||||
if len(src.ByGender) == 0 {
|
||||
return
|
||||
}
|
||||
if dst.ByGender == nil {
|
||||
dst.ByGender = make(map[string]string, len(src.ByGender))
|
||||
}
|
||||
maps.Copy(dst.ByGender, src.ByGender)
|
||||
}
|
||||
|
||||
func mergePunctuationRules(dst *PunctuationRules, src PunctuationRules) {
|
||||
if dst == nil {
|
||||
return
|
||||
}
|
||||
if src.LabelSuffix != "" {
|
||||
dst.LabelSuffix = src.LabelSuffix
|
||||
}
|
||||
if src.ProgressSuffix != "" {
|
||||
dst.ProgressSuffix = src.ProgressSuffix
|
||||
}
|
||||
}
|
||||
|
||||
func mergeSignalData(dst *SignalData, src SignalData) {
|
||||
if dst == nil {
|
||||
return
|
||||
}
|
||||
if len(src.NounDeterminers) > 0 {
|
||||
dst.NounDeterminers = append(dst.NounDeterminers, src.NounDeterminers...)
|
||||
}
|
||||
if len(src.VerbAuxiliaries) > 0 {
|
||||
dst.VerbAuxiliaries = append(dst.VerbAuxiliaries, src.VerbAuxiliaries...)
|
||||
}
|
||||
if len(src.VerbInfinitive) > 0 {
|
||||
dst.VerbInfinitive = append(dst.VerbInfinitive, src.VerbInfinitive...)
|
||||
}
|
||||
if len(src.Priors) == 0 {
|
||||
return
|
||||
}
|
||||
if dst.Priors == nil {
|
||||
dst.Priors = make(map[string]map[string]float64, len(src.Priors))
|
||||
}
|
||||
for word, priors := range src.Priors {
|
||||
if dst.Priors[word] == nil {
|
||||
dst.Priors[word] = make(map[string]float64, len(priors))
|
||||
}
|
||||
maps.Copy(dst.Priors[word], priors)
|
||||
}
|
||||
}
|
||||
|
||||
func grammarDataHasContent(data *GrammarData) bool {
|
||||
if data == nil {
|
||||
return false
|
||||
}
|
||||
if len(data.Verbs) > 0 || len(data.Nouns) > 0 || len(data.Words) > 0 {
|
||||
return true
|
||||
}
|
||||
if data.Articles.IndefiniteDefault != "" ||
|
||||
data.Articles.IndefiniteVowel != "" ||
|
||||
data.Articles.Definite != "" ||
|
||||
len(data.Articles.ByGender) > 0 {
|
||||
return true
|
||||
}
|
||||
if data.Punct.LabelSuffix != "" || data.Punct.ProgressSuffix != "" {
|
||||
return true
|
||||
}
|
||||
if len(data.Signals.NounDeterminers) > 0 ||
|
||||
len(data.Signals.VerbAuxiliaries) > 0 ||
|
||||
len(data.Signals.VerbInfinitive) > 0 ||
|
||||
len(data.Signals.Priors) > 0 {
|
||||
return true
|
||||
}
|
||||
return data.Number != (NumberFormat{})
|
||||
}
|
||||
|
||||
// IrregularVerbs returns a copy of the irregular verb forms map.
|
||||
|
|
|
|||
174
loader_test.go
174
loader_test.go
|
|
@ -214,6 +214,180 @@ func TestFlattenWithGrammar(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
// TestMergeGrammarData verifies that MergeGrammarData overlays new grammar
// entries onto an already-registered language instead of replacing it:
// map fields are unioned, signal slices are appended, and scalar fields
// (articles, punctuation, number separators) are overwritten only when the
// merged data supplies a non-empty value.
func TestMergeGrammarData(t *testing.T) {
	const lang = "zz" // synthetic language code, so no real locale data can interfere
	original := GetGrammarData(lang)
	t.Cleanup(func() {
		// Restore whatever (possibly nil) entry "zz" had before the test,
		// since the grammar store is global state.
		SetGrammarData(lang, original)
	})

	// Seed the store with a fully populated baseline entry.
	SetGrammarData(lang, &GrammarData{
		Verbs: map[string]VerbForms{
			"keep": {Past: "kept", Gerund: "keeping"},
		},
		Nouns: map[string]NounForms{
			"file": {One: "file", Other: "files"},
		},
		Words: map[string]string{
			"url": "URL",
		},
		Articles: ArticleForms{
			IndefiniteDefault: "a",
			IndefiniteVowel:   "an",
			Definite:          "the",
			ByGender: map[string]string{
				"m": "le",
			},
		},
		Punct: PunctuationRules{
			LabelSuffix:    ":",
			ProgressSuffix: "...",
		},
		Signals: SignalData{
			NounDeterminers: []string{"the"},
			VerbAuxiliaries: []string{"will"},
			VerbInfinitive:  []string{"to"},
			Priors: map[string]map[string]float64{
				"run": {
					"verb": 0.7,
				},
			},
		},
		Number: NumberFormat{
			ThousandsSep: ",",
			DecimalSep:   ".",
			PercentFmt:   "%s%%",
		},
	})

	// Merge a partial overlay: new map keys, a second gender article, one
	// punctuation override, extra signal words, an additional prior for
	// "run", and a single number-format field.
	MergeGrammarData(lang, &GrammarData{
		Verbs: map[string]VerbForms{
			"add": {Past: "added", Gerund: "adding"},
		},
		Nouns: map[string]NounForms{
			"repo": {One: "repo", Other: "repos"},
		},
		Words: map[string]string{
			"api": "API",
		},
		Articles: ArticleForms{
			ByGender: map[string]string{
				"f": "la",
			},
		},
		Punct: PunctuationRules{
			LabelSuffix: " !",
		},
		Signals: SignalData{
			NounDeterminers: []string{"a"},
			VerbAuxiliaries: []string{"can"},
			VerbInfinitive:  []string{"go"},
			Priors: map[string]map[string]float64{
				"run": {
					"noun": 0.3,
				},
			},
		},
		Number: NumberFormat{
			ThousandsSep: ".",
		},
	})

	data := GetGrammarData(lang)
	if data == nil {
		t.Fatal("MergeGrammarData() cleared existing grammar data")
	}
	// Map fields must contain both the baseline and the merged keys.
	if _, ok := data.Verbs["keep"]; !ok {
		t.Error("existing verb entry was lost")
	}
	if _, ok := data.Verbs["add"]; !ok {
		t.Error("merged verb entry missing")
	}
	if _, ok := data.Nouns["file"]; !ok {
		t.Error("existing noun entry was lost")
	}
	if _, ok := data.Nouns["repo"]; !ok {
		t.Error("merged noun entry missing")
	}
	if data.Words["url"] != "URL" || data.Words["api"] != "API" {
		t.Errorf("words not merged correctly: %+v", data.Words)
	}
	// The overlay left these scalar article fields empty, so the baseline
	// values must survive the merge untouched.
	if data.Articles.IndefiniteDefault != "a" || data.Articles.IndefiniteVowel != "an" || data.Articles.Definite != "the" {
		t.Errorf("article defaults changed unexpectedly: %+v", data.Articles)
	}
	if data.Articles.ByGender["m"] != "le" || data.Articles.ByGender["f"] != "la" {
		t.Errorf("article by_gender not merged correctly: %+v", data.Articles.ByGender)
	}
	// LabelSuffix was overridden by the overlay; ProgressSuffix was not set
	// there and must keep its baseline value.
	if data.Punct.LabelSuffix != " !" || data.Punct.ProgressSuffix != "..." {
		t.Errorf("punctuation not merged correctly: %+v", data.Punct)
	}
	// Signal slices append rather than replace: one baseline + one merged
	// entry each.
	if len(data.Signals.NounDeterminers) != 2 || len(data.Signals.VerbAuxiliaries) != 2 || len(data.Signals.VerbInfinitive) != 2 {
		t.Errorf("signal slices not merged correctly: %+v", data.Signals)
	}
	if got := data.Signals.Priors["run"]["verb"]; got != 0.7 {
		t.Errorf("signal priors lost existing value: got %v", got)
	}
	if got := data.Signals.Priors["run"]["noun"]; got != 0.3 {
		t.Errorf("signal priors missing merged value: got %v", got)
	}
	// ThousandsSep was overridden; DecimalSep and PercentFmt were empty in
	// the overlay and must retain their baseline values.
	if data.Number.ThousandsSep != "." || data.Number.DecimalSep != "." || data.Number.PercentFmt != "%s%%" {
		t.Errorf("number format not merged correctly: %+v", data.Number)
	}
}
|
||||
|
||||
// TestNewWithLoader_LoadsGrammarOnlyLocale verifies that a locale file
// containing only a "gram" section — no translated messages — is still
// loaded: its grammar data must land in the global store, and switching to
// that language must apply its punctuation rules to formatting helpers
// such as Label.
func TestNewWithLoader_LoadsGrammarOnlyLocale(t *testing.T) {
	// fr.json carries grammar rules only: articles, punctuation suffixes,
	// signal words, and number formatting.
	loaderFS := fstest.MapFS{
		"fr.json": &fstest.MapFile{
			Data: []byte(`{
	"gram": {
		"article": {
			"indefinite": { "default": "el", "vowel": "l'" },
			"definite": "el",
			"by_gender": { "m": "el", "f": "la" }
		},
		"punct": { "label": " !", "progress": " ..." },
		"signal": {
			"noun_determiner": ["el"],
			"verb_auxiliary": ["va"],
			"verb_infinitive": ["a"]
		},
		"number": { "thousands": ".", "decimal": ",", "percent": "%s %%"}
	}
}`),
		},
	}

	svc, err := NewWithLoader(NewFSLoader(loaderFS, "."))
	if err != nil {
		t.Fatalf("NewWithLoader() failed: %v", err)
	}

	// The grammar must have reached the global store even though the file
	// defined no message strings.
	data := GetGrammarData("fr")
	if data == nil {
		t.Fatal("grammar-only locale was not loaded")
	}
	if data.Articles.ByGender["f"] != "la" {
		t.Errorf("article by_gender[f] = %q, want %q", data.Articles.ByGender["f"], "la")
	}
	if data.Punct.LabelSuffix != " !" || data.Punct.ProgressSuffix != " ..." {
		t.Errorf("punctuation not loaded: %+v", data.Punct)
	}
	if len(data.Signals.NounDeterminers) != 1 || data.Signals.NounDeterminers[0] != "el" {
		t.Errorf("signals not loaded: %+v", data.Signals)
	}
	if data.Number.DecimalSep != "," || data.Number.ThousandsSep != "." {
		t.Errorf("number format not loaded: %+v", data.Number)
	}

	// Activate the grammar-only locale end to end: Label("status") is
	// expected to render "Status !" — presumably the base text plus the
	// French " !" label suffix; confirm against Label's implementation.
	if err := svc.SetLanguage("fr"); err != nil {
		t.Fatalf("SetLanguage(fr) failed: %v", err)
	}
	SetDefault(svc)
	if got := Label("status"); got != "Status !" {
		t.Errorf("Label(status) = %q, want %q", got, "Status !")
	}
}
|
||||
|
||||
func TestFlattenPluralObject(t *testing.T) {
|
||||
messages := make(map[string]Message)
|
||||
raw := map[string]any{
|
||||
|
|
|
|||
|
|
@ -111,7 +111,7 @@ func NewWithLoader(loader Loader, opts ...Option) (*Service, error) {
|
|||
return nil, log.E("NewWithLoader", "load locale: "+lang, err)
|
||||
}
|
||||
s.messages[lang] = messages
|
||||
if grammar != nil && (len(grammar.Verbs) > 0 || len(grammar.Nouns) > 0 || len(grammar.Words) > 0) {
|
||||
if grammarDataHasContent(grammar) {
|
||||
SetGrammarData(lang, grammar)
|
||||
}
|
||||
tag := language.Make(lang)
|
||||
|
|
@ -198,7 +198,7 @@ func (s *Service) loadJSON(lang string, data []byte) error {
|
|||
} else {
|
||||
s.messages[lang] = messages
|
||||
}
|
||||
if len(grammarData.Verbs) > 0 || len(grammarData.Nouns) > 0 || len(grammarData.Words) > 0 {
|
||||
if grammarDataHasContent(grammarData) {
|
||||
MergeGrammarData(lang, grammarData)
|
||||
}
|
||||
return nil
|
||||
|
|
@ -481,7 +481,7 @@ func (s *Service) AddLoader(loader Loader) error {
|
|||
|
||||
// Merge grammar data into the global grammar store (merge, not replace,
|
||||
// so that multiple loaders contribute entries for the same language).
|
||||
if grammar != nil && (len(grammar.Verbs) > 0 || len(grammar.Nouns) > 0 || len(grammar.Words) > 0) {
|
||||
if grammarDataHasContent(grammar) {
|
||||
MergeGrammarData(lang, grammar)
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue