// NOTE: registerAgent hardcodes ConcurrentJobs: 5, but the concurrent test
// launches 10 goroutines. That test overrides the allowance with
// ConcurrentJobs: 0 (unlimited) so all 10 dispatches succeed as intended.
package agentic
|
|
|
|
import (
|
|
"context"
|
|
"encoding/json"
|
|
"net/http"
|
|
"net/http/httptest"
|
|
"sync"
|
|
"testing"
|
|
"time"
|
|
|
|
"github.com/stretchr/testify/assert"
|
|
"github.com/stretchr/testify/require"
|
|
)
|
|
|
|
// setupDispatcher creates a Dispatcher with a memory registry, default router,
|
|
// and memory allowance store, pre-loaded with agents and allowances.
|
|
func setupDispatcher(t *testing.T, client *Client) (*Dispatcher, *MemoryRegistry, *MemoryStore) {
|
|
t.Helper()
|
|
|
|
reg := NewMemoryRegistry()
|
|
router := NewDefaultRouter()
|
|
store := NewMemoryStore()
|
|
svc := NewAllowanceService(store)
|
|
|
|
d := NewDispatcher(reg, router, svc, client)
|
|
return d, reg, store
|
|
}
|
|
|
|
func registerAgent(t *testing.T, reg *MemoryRegistry, store *MemoryStore, id string, caps []string, maxLoad int) {
|
|
t.Helper()
|
|
_ = reg.Register(AgentInfo{
|
|
ID: id,
|
|
Name: id,
|
|
Capabilities: caps,
|
|
Status: AgentAvailable,
|
|
LastHeartbeat: time.Now().UTC(),
|
|
MaxLoad: maxLoad,
|
|
})
|
|
_ = store.SetAllowance(&AgentAllowance{
|
|
AgentID: id,
|
|
DailyTokenLimit: 100000,
|
|
DailyJobLimit: 50,
|
|
ConcurrentJobs: 5,
|
|
})
|
|
}
|
|
|
|
// --- Dispatch tests ---
|
|
|
|
func TestDispatcher_Dispatch_Good_NilClient(t *testing.T) {
|
|
d, reg, store := setupDispatcher(t, nil)
|
|
registerAgent(t, reg, store, "agent-1", []string{"go"}, 5)
|
|
|
|
task := &Task{ID: "task-1", Labels: []string{"go"}, Priority: PriorityMedium}
|
|
agentID, err := d.Dispatch(context.Background(), task)
|
|
require.NoError(t, err)
|
|
assert.Equal(t, "agent-1", agentID)
|
|
|
|
// Verify usage was recorded.
|
|
usage, _ := store.GetUsage("agent-1")
|
|
assert.Equal(t, 1, usage.JobsStarted)
|
|
assert.Equal(t, 1, usage.ActiveJobs)
|
|
}
|
|
|
|
func TestDispatcher_Dispatch_Good_WithHTTPClient(t *testing.T) {
|
|
claimedTask := Task{ID: "task-1", Status: StatusInProgress, ClaimedBy: "agent-1"}
|
|
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method == http.MethodPost && r.URL.Path == "/api/tasks/task-1/claim" {
|
|
w.Header().Set("Content-Type", "application/json")
|
|
_ = json.NewEncoder(w).Encode(ClaimResponse{Task: &claimedTask})
|
|
return
|
|
}
|
|
w.WriteHeader(http.StatusNotFound)
|
|
}))
|
|
defer server.Close()
|
|
|
|
client := NewClient(server.URL, "test-token")
|
|
d, reg, store := setupDispatcher(t, client)
|
|
registerAgent(t, reg, store, "agent-1", nil, 5)
|
|
|
|
task := &Task{ID: "task-1", Priority: PriorityHigh}
|
|
agentID, err := d.Dispatch(context.Background(), task)
|
|
require.NoError(t, err)
|
|
assert.Equal(t, "agent-1", agentID)
|
|
|
|
// Verify usage recorded.
|
|
usage, _ := store.GetUsage("agent-1")
|
|
assert.Equal(t, 1, usage.JobsStarted)
|
|
}
|
|
|
|
func TestDispatcher_Dispatch_Good_PicksBestAgent(t *testing.T) {
|
|
d, reg, store := setupDispatcher(t, nil)
|
|
registerAgent(t, reg, store, "heavy", []string{"go"}, 5)
|
|
registerAgent(t, reg, store, "light", []string{"go"}, 5)
|
|
|
|
// Give "heavy" some load.
|
|
_ = reg.Register(AgentInfo{
|
|
ID: "heavy",
|
|
Name: "heavy",
|
|
Capabilities: []string{"go"},
|
|
Status: AgentAvailable,
|
|
LastHeartbeat: time.Now().UTC(),
|
|
CurrentLoad: 4,
|
|
MaxLoad: 5,
|
|
})
|
|
|
|
task := &Task{ID: "task-1", Labels: []string{"go"}, Priority: PriorityMedium}
|
|
agentID, err := d.Dispatch(context.Background(), task)
|
|
require.NoError(t, err)
|
|
assert.Equal(t, "light", agentID) // light has score 1.0, heavy has 0.2
|
|
}
|
|
|
|
func TestDispatcher_Dispatch_Bad_NoAgents(t *testing.T) {
|
|
d, _, _ := setupDispatcher(t, nil)
|
|
|
|
task := &Task{ID: "task-1", Priority: PriorityMedium}
|
|
_, err := d.Dispatch(context.Background(), task)
|
|
require.Error(t, err)
|
|
}
|
|
|
|
func TestDispatcher_Dispatch_Bad_AllowanceExceeded(t *testing.T) {
|
|
d, reg, store := setupDispatcher(t, nil)
|
|
registerAgent(t, reg, store, "agent-1", nil, 5)
|
|
|
|
// Exhaust the agent's daily job limit.
|
|
_ = store.SetAllowance(&AgentAllowance{
|
|
AgentID: "agent-1",
|
|
DailyJobLimit: 1,
|
|
})
|
|
_ = store.IncrementUsage("agent-1", 0, 1)
|
|
|
|
task := &Task{ID: "task-1", Priority: PriorityMedium}
|
|
_, err := d.Dispatch(context.Background(), task)
|
|
require.Error(t, err)
|
|
assert.Contains(t, err.Error(), "quota exceeded")
|
|
}
|
|
|
|
func TestDispatcher_Dispatch_Bad_ClaimFails(t *testing.T) {
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
w.WriteHeader(http.StatusConflict)
|
|
_ = json.NewEncoder(w).Encode(APIError{Code: 409, Message: "already claimed"})
|
|
}))
|
|
defer server.Close()
|
|
|
|
client := NewClient(server.URL, "test-token")
|
|
d, reg, store := setupDispatcher(t, client)
|
|
registerAgent(t, reg, store, "agent-1", nil, 5)
|
|
|
|
task := &Task{ID: "task-1", Priority: PriorityMedium}
|
|
_, err := d.Dispatch(context.Background(), task)
|
|
require.Error(t, err)
|
|
assert.Contains(t, err.Error(), "claim task")
|
|
|
|
// Verify usage was NOT recorded when claim fails.
|
|
usage, _ := store.GetUsage("agent-1")
|
|
assert.Equal(t, 0, usage.JobsStarted)
|
|
}
|
|
|
|
// --- DispatchLoop tests ---
|
|
|
|
func TestDispatcher_DispatchLoop_Good_Cancellation(t *testing.T) {
|
|
d, _, _ := setupDispatcher(t, nil)
|
|
|
|
ctx, cancel := context.WithCancel(context.Background())
|
|
cancel() // Cancel immediately.
|
|
|
|
err := d.DispatchLoop(ctx, 100*time.Millisecond)
|
|
require.ErrorIs(t, err, context.Canceled)
|
|
}
|
|
|
|
func TestDispatcher_DispatchLoop_Good_DispatchesPendingTasks(t *testing.T) {
|
|
pendingTasks := []Task{
|
|
{ID: "task-1", Status: StatusPending, Priority: PriorityMedium},
|
|
{ID: "task-2", Status: StatusPending, Priority: PriorityHigh},
|
|
}
|
|
|
|
var mu sync.Mutex
|
|
claimedIDs := make(map[string]bool)
|
|
|
|
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
switch {
|
|
case r.Method == http.MethodGet && r.URL.Path == "/api/tasks":
|
|
w.Header().Set("Content-Type", "application/json")
|
|
mu.Lock()
|
|
// Return only tasks not yet claimed.
|
|
var remaining []Task
|
|
for _, t := range pendingTasks {
|
|
if !claimedIDs[t.ID] {
|
|
remaining = append(remaining, t)
|
|
}
|
|
}
|
|
mu.Unlock()
|
|
_ = json.NewEncoder(w).Encode(remaining)
|
|
|
|
case r.Method == http.MethodPost:
|
|
// Extract task ID from claim URL.
|
|
w.Header().Set("Content-Type", "application/json")
|
|
// Parse the task ID from the path.
|
|
for _, t := range pendingTasks {
|
|
if r.URL.Path == "/api/tasks/"+t.ID+"/claim" {
|
|
mu.Lock()
|
|
claimedIDs[t.ID] = true
|
|
mu.Unlock()
|
|
claimed := t
|
|
claimed.Status = StatusInProgress
|
|
_ = json.NewEncoder(w).Encode(ClaimResponse{Task: &claimed})
|
|
return
|
|
}
|
|
}
|
|
w.WriteHeader(http.StatusNotFound)
|
|
|
|
default:
|
|
w.WriteHeader(http.StatusNotFound)
|
|
}
|
|
}))
|
|
defer server.Close()
|
|
|
|
client := NewClient(server.URL, "test-token")
|
|
d, reg, store := setupDispatcher(t, client)
|
|
registerAgent(t, reg, store, "agent-1", nil, 10)
|
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
|
|
defer cancel()
|
|
|
|
err := d.DispatchLoop(ctx, 50*time.Millisecond)
|
|
require.ErrorIs(t, err, context.DeadlineExceeded)
|
|
|
|
// Verify tasks were claimed.
|
|
mu.Lock()
|
|
defer mu.Unlock()
|
|
assert.True(t, claimedIDs["task-1"])
|
|
assert.True(t, claimedIDs["task-2"])
|
|
}
|
|
|
|
func TestDispatcher_DispatchLoop_Good_NilClientSkipsTick(t *testing.T) {
|
|
d, _, _ := setupDispatcher(t, nil)
|
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
|
|
defer cancel()
|
|
|
|
err := d.DispatchLoop(ctx, 50*time.Millisecond)
|
|
require.ErrorIs(t, err, context.DeadlineExceeded)
|
|
// No panics — nil client is handled gracefully.
|
|
}
|
|
|
|
// --- Concurrent dispatch ---
|
|
|
|
func TestDispatcher_Dispatch_Good_Concurrent(t *testing.T) {
|
|
d, reg, store := setupDispatcher(t, nil)
|
|
registerAgent(t, reg, store, "agent-1", nil, 0)
|
|
// Override allowance to truly unlimited (registerAgent hardcodes ConcurrentJobs: 5)
|
|
_ = store.SetAllowance(&AgentAllowance{
|
|
AgentID: "agent-1",
|
|
DailyJobLimit: 100,
|
|
ConcurrentJobs: 0, // 0 = unlimited
|
|
})
|
|
|
|
var wg sync.WaitGroup
|
|
for i := 0; i < 10; i++ {
|
|
wg.Add(1)
|
|
go func(n int) {
|
|
defer wg.Done()
|
|
task := &Task{ID: "task-" + string(rune('a'+n)), Priority: PriorityMedium}
|
|
_, _ = d.Dispatch(context.Background(), task)
|
|
}(i)
|
|
}
|
|
wg.Wait()
|
|
|
|
// Verify usage was recorded for all dispatches.
|
|
usage, _ := store.GetUsage("agent-1")
|
|
assert.Equal(t, 10, usage.JobsStarted)
|
|
}
|