fix(difficulty): correct LWMA algorithm and hardfork-aware target

Rewrites the LWMA difficulty algorithm to match the C++ daemon exactly:
- Uses N=60 window with linear weighting (position 1..n)
- Clamps solve times to [-6T, 6T]
- Excludes genesis block from the difficulty window
- Selects target based on hardfork: 120s pre-HF2, 240s post-HF2

On testnet, HF2 activates at height 10 (active from height 11),
doubling the target from 120s to 240s. The previous fixed 120s target
produced exactly half the expected difficulty from height 11 onward.

Integration test verifies all 2576 testnet blocks match the daemon.

Co-Authored-By: Charon <charon@lethean.io>
This commit is contained in:
Claude 2026-02-21 22:32:57 +00:00
parent ff8f300601
commit 3c76dd7070
No known key found for this signature in database
GPG key ID: AF404715446AEB41
7 changed files with 207 additions and 76 deletions

View file

@ -14,20 +14,37 @@ import (
// NextDifficulty computes the expected difficulty for the block at the given
// height, using the LWMA algorithm over stored block history.
func (c *Chain) NextDifficulty(height uint64) (uint64, error) {
//
// The genesis block (height 0) is excluded from the difficulty window,
// matching the C++ daemon's load_targetdata_cache which skips index 0.
//
// The target block time depends on the hardfork schedule: 120s pre-HF2,
// 240s post-HF2 (matching DIFFICULTY_POW_TARGET_HF6 in the C++ source).
func (c *Chain) NextDifficulty(height uint64, forks []config.HardFork) (uint64, error) {
if height == 0 {
return 1, nil
}
// Determine how far back to look.
lookback := height
if lookback > difficulty.BlocksCount {
lookback = difficulty.BlocksCount
// LWMA needs N+1 entries (N solve-time intervals).
// Start from height 1 — genesis is excluded from the difficulty window.
maxLookback := difficulty.LWMAWindow + 1
lookback := height // height excludes genesis since we start from 1
if lookback > maxLookback {
lookback = maxLookback
}
// Start from max(1, height - lookback) to exclude genesis.
startHeight := height - lookback
count := int(lookback)
if startHeight == 0 {
startHeight = 1
lookback = height - 1
}
if lookback == 0 {
return 1, nil
}
count := int(lookback)
timestamps := make([]uint64, count)
cumulDiffs := make([]*big.Int, count)
@ -43,6 +60,13 @@ func (c *Chain) NextDifficulty(height uint64) (uint64, error) {
cumulDiffs[i] = new(big.Int).SetUint64(meta.CumulativeDiff)
}
result := difficulty.NextDifficulty(timestamps, cumulDiffs, config.BlockTarget)
// Determine the target block time based on hardfork status.
// HF2 doubles the target from 120s to 240s.
target := config.DifficultyPowTarget
if config.IsHardForkActive(forks, config.HF2, height) {
target = config.DifficultyPowTargetHF6
}
result := difficulty.NextDifficulty(timestamps, cumulDiffs, target)
return result.Uint64(), nil
}

View file

@ -8,18 +8,25 @@ package chain
import (
"testing"
store "forge.lthn.ai/core/go-store"
"forge.lthn.ai/core/go-blockchain/config"
"forge.lthn.ai/core/go-blockchain/types"
store "forge.lthn.ai/core/go-store"
"github.com/stretchr/testify/require"
)
// preHF2Forks is a fork schedule in which HF2 never activates, so the
// difficulty target block time stays at the pre-HF2 value (120s).
// Used by tests that exercise the legacy-target code path in NextDifficulty.
var preHF2Forks = []config.HardFork{
{Version: config.HF0Initial, Height: 0},
}
func TestNextDifficulty_Genesis(t *testing.T) {
s, err := store.New(":memory:")
require.NoError(t, err)
defer s.Close()
c := New(s)
diff, err := c.NextDifficulty(0)
diff, err := c.NextDifficulty(0, preHF2Forks)
require.NoError(t, err)
require.Equal(t, uint64(1), diff)
}
@ -31,7 +38,8 @@ func TestNextDifficulty_FewBlocks(t *testing.T) {
c := New(s)
// Store 5 blocks with constant 120s intervals and difficulty 1000.
// Store genesis + 4 blocks with constant 120s intervals and difficulty 1000.
// Genesis at height 0 is excluded from the LWMA window.
baseDiff := uint64(1000)
for i := uint64(0); i < 5; i++ {
err := c.PutBlock(&types.Block{}, &BlockMeta{
@ -44,17 +52,16 @@ func TestNextDifficulty_FewBlocks(t *testing.T) {
require.NoError(t, err)
}
// Next difficulty for height 5 should be approximately 1000.
diff, err := c.NextDifficulty(5)
// Next difficulty for height 5 uses blocks 1-4 (n=3 intervals).
// LWMA formula with constant D and T gives D/n = 1000/3 ≈ 333.
diff, err := c.NextDifficulty(5, preHF2Forks)
require.NoError(t, err)
require.Greater(t, diff, uint64(0))
// With constant intervals at target, difficulty should be close to base.
// Allow 10% tolerance.
low := baseDiff - baseDiff/10
high := baseDiff + baseDiff/10
require.GreaterOrEqual(t, diff, low, "difficulty %d below expected range [%d, %d]", diff, low, high)
require.LessOrEqual(t, diff, high, "difficulty %d above expected range [%d, %d]", diff, low, high)
// LWMA gives total_work * T * (n+1) / (2 * weighted_solvetimes * n).
// For constant intervals: D/n = 1000/3 = 333.
expected := uint64(333)
require.Equal(t, expected, diff)
}
func TestNextDifficulty_EmptyChain(t *testing.T) {
@ -65,7 +72,7 @@ func TestNextDifficulty_EmptyChain(t *testing.T) {
c := New(s)
// Height 1 with no blocks stored — should return starter difficulty.
diff, err := c.NextDifficulty(1)
diff, err := c.NextDifficulty(1, preHF2Forks)
require.NoError(t, err)
require.Equal(t, uint64(1), diff)
}

View file

@ -168,6 +168,61 @@ func TestIntegration_SyncWithSignatures(t *testing.T) {
require.Equal(t, remoteHeight, finalHeight)
}
// TestIntegration_DifficultyMatchesRPC syncs the testnet chain from a live
// daemon and checks that our LWMA NextDifficulty reproduces the
// daemon-provided difficulty for every synced block. Skipped in -short mode
// or when the testnet daemon is unreachable.
func TestIntegration_DifficultyMatchesRPC(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping difficulty comparison test in short mode")
	}
	client := rpc.NewClientWithHTTP(testnetRPCAddr, &http.Client{Timeout: 60 * time.Second})
	if _, err := client.GetHeight(); err != nil {
		t.Skipf("testnet daemon not reachable: %v", err)
	}
	s, err := store.New(":memory:")
	require.NoError(t, err)
	defer s.Close()
	c := New(s)
	// Sync via RPC; the daemon-provided difficulty is stored in each BlockMeta.
	err = c.Sync(context.Background(), client, SyncOptions{
		VerifySignatures: false,
		Forks:            config.TestnetForks,
	})
	require.NoError(t, err)
	finalHeight, _ := c.Height()
	t.Logf("synced %d blocks, checking difficulty computation", finalHeight)
	// Compare our computed difficulty against the stored daemon value for
	// every block from height 1 onwards; log at most the first 10 mismatches.
	var mismatches int
	for h := uint64(1); h < finalHeight; h++ {
		meta, metaErr := c.getBlockMeta(h)
		require.NoError(t, metaErr)
		computed, diffErr := c.NextDifficulty(h, config.TestnetForks)
		require.NoError(t, diffErr)
		if computed == meta.Difficulty {
			continue
		}
		if mismatches < 10 {
			t.Logf("difficulty mismatch at height %d: computed=%d, daemon=%d",
				h, computed, meta.Difficulty)
		}
		mismatches++
	}
	if mismatches == 0 {
		t.Logf("all %d blocks have matching difficulty", finalHeight-1)
	} else {
		t.Errorf("%d/%d blocks have difficulty mismatches", mismatches, finalHeight-1)
	}
}
func TestIntegration_P2PSync(t *testing.T) {
if testing.Short() {
t.Skip("skipping P2P sync test in short mode")

View file

@ -116,7 +116,7 @@ func (c *Chain) P2PSync(ctx context.Context, conn P2PConnection, opts SyncOption
log.Printf("p2p sync: processing block %d", blockHeight)
}
blockDiff, err := c.NextDifficulty(blockHeight)
blockDiff, err := c.NextDifficulty(blockHeight, opts.Forks)
if err != nil {
return fmt.Errorf("p2p sync: compute difficulty for block %d: %w", blockHeight, err)
}

View file

@ -13,7 +13,9 @@
//
// The algorithm examines a window of recent block timestamps and cumulative
// difficulties to calculate the next target difficulty, ensuring blocks
// arrive at the desired interval on average.
// arrive at the desired interval on average. Each solve-time interval is
// weighted linearly by its recency — more recent intervals have greater
// influence on the result.
package difficulty
import (
@ -22,18 +24,22 @@ import (
// Algorithm constants matching the C++ source.
const (
// Window is the number of blocks in the difficulty calculation window.
// Window is the number of blocks in the legacy difficulty window.
Window uint64 = 720
// Lag is the additional lookback beyond the window.
// Lag is the additional lookback beyond the window (legacy).
Lag uint64 = 15
// Cut is the number of extreme timestamps trimmed from each end after
// sorting. This dampens the effect of outlier timestamps.
// Cut is the number of extreme timestamps trimmed (legacy).
Cut uint64 = 60
// BlocksCount is the total number of blocks considered (Window + Lag).
// Used by legacy algorithms; the LWMA uses LWMAWindow instead.
BlocksCount uint64 = Window + Lag
// LWMAWindow is the number of solve-time intervals used by the LWMA
// algorithm (N=60). This means we need N+1 = 61 block entries.
LWMAWindow uint64 = 60
)
// StarterDifficulty is the minimum difficulty returned when there is
@ -43,53 +49,73 @@ var StarterDifficulty = big.NewInt(1)
// NextDifficulty calculates the next block difficulty using the LWMA algorithm.
//
// Parameters:
// - timestamps: block timestamps for the last BlocksCount blocks, ordered
// from oldest to newest.
// - timestamps: block timestamps ordered from oldest to newest.
// - cumulativeDiffs: cumulative difficulties corresponding to each block.
// - target: the desired block interval in seconds (e.g. 120 for PoW/PoS).
//
// Returns the calculated difficulty for the next block.
//
// If the input slices are too short to perform a meaningful calculation, the
// function returns StarterDifficulty.
// The algorithm matches the C++ next_difficulty_lwma() in difficulty.cpp:
//
// next_D = total_work * T * (n+1) / (2 * weighted_solvetimes * n)
//
// where each solve-time interval i is weighted by its position (1..n),
// giving more influence to recent blocks.
func NextDifficulty(timestamps []uint64, cumulativeDiffs []*big.Int, target uint64) *big.Int {
// Need at least 2 entries to compute a time span and difficulty delta.
// Need at least 2 entries to compute one solve-time interval.
if len(timestamps) < 2 || len(cumulativeDiffs) < 2 {
return new(big.Int).Set(StarterDifficulty)
}
length := uint64(len(timestamps))
if length > BlocksCount {
length = BlocksCount
length := len(timestamps)
// Trim to at most N+1 entries (N solve-time intervals).
maxEntries := int(LWMAWindow) + 1
if length > maxEntries {
// Keep the most recent entries.
offset := length - maxEntries
timestamps = timestamps[offset:]
cumulativeDiffs = cumulativeDiffs[offset:]
length = maxEntries
}
// Use the available window, but ensure we have at least 2 points.
windowSize := length
if windowSize < 2 {
// n = number of solve-time intervals.
n := int64(length - 1)
T := int64(target)
// Compute linearly weighted solve-times.
// Weight i (1..n) gives more recent intervals higher influence.
var weightedSolveTimes int64
for i := int64(1); i <= n; i++ {
st := int64(timestamps[i]) - int64(timestamps[i-1])
// Clamp to [-6T, 6T] to limit timestamp manipulation impact.
if st < -(6 * T) {
st = -(6 * T)
}
if st > 6*T {
st = 6 * T
}
weightedSolveTimes += st * i
}
// Guard against zero or negative (pathological timestamps).
if weightedSolveTimes <= 0 {
weightedSolveTimes = 1
}
// Total work across the window.
totalWork := new(big.Int).Sub(cumulativeDiffs[n], cumulativeDiffs[0])
if totalWork.Sign() <= 0 {
return new(big.Int).Set(StarterDifficulty)
}
// Calculate the time span across the window.
// Use only the last windowSize entries.
startIdx := uint64(len(timestamps)) - windowSize
endIdx := uint64(len(timestamps)) - 1
// LWMA formula: next_D = total_work * T * (n+1) / (2 * weighted_solvetimes * n)
numerator := new(big.Int).Mul(totalWork, big.NewInt(T*(n+1)))
denominator := big.NewInt(2 * weightedSolveTimes * n)
timeSpan := timestamps[endIdx] - timestamps[startIdx]
if timeSpan == 0 {
timeSpan = 1 // prevent division by zero
}
// Calculate the difficulty delta across the same window.
diffDelta := new(big.Int).Sub(cumulativeDiffs[endIdx], cumulativeDiffs[startIdx])
if diffDelta.Sign() <= 0 {
return new(big.Int).Set(StarterDifficulty)
}
// LWMA core: nextDiff = diffDelta * target / timeSpan
// This keeps the difficulty proportional to the hash rate needed to
// maintain the target block interval.
nextDiff := new(big.Int).Mul(diffDelta, new(big.Int).SetUint64(target))
nextDiff.Div(nextDiff, new(big.Int).SetUint64(timeSpan))
nextDiff := new(big.Int).Div(numerator, denominator)
// Ensure we never return zero difficulty.
if nextDiff.Sign() <= 0 {

View file

@ -14,7 +14,7 @@ import (
func TestNextDifficulty_Good(t *testing.T) {
// Synthetic test: constant block times at exactly the target interval.
// With perfectly timed blocks, the difficulty should remain stable.
// With the LWMA-1 formula, constant D gives next_D = D/n for full window.
target := config.BlockTarget
const numBlocks = 100
@ -32,16 +32,11 @@ func TestNextDifficulty_Good(t *testing.T) {
t.Fatalf("NextDifficulty returned non-positive value: %s", result)
}
// With constant intervals, the result should be approximately equal to
// the base difficulty. Allow some tolerance due to integer arithmetic.
expected := baseDifficulty
tolerance := new(big.Int).Div(expected, big.NewInt(10)) // 10% tolerance
diff := new(big.Int).Sub(result, expected)
diff.Abs(diff)
if diff.Cmp(tolerance) > 0 {
t.Errorf("NextDifficulty with constant intervals: got %s, expected ~%s (tolerance %s)",
result, expected, tolerance)
// LWMA trims to last 61 entries (N+1=61), giving n=60 intervals.
// Formula: D/n = 1000/60 = 16.
expected := big.NewInt(16)
if result.Cmp(expected) != 0 {
t.Errorf("NextDifficulty with constant intervals: got %s, expected %s", result, expected)
}
}
@ -64,7 +59,8 @@ func TestNextDifficultySingleEntry_Good(t *testing.T) {
}
func TestNextDifficultyFastBlocks_Good(t *testing.T) {
// When blocks come faster than the target, difficulty should increase.
// When blocks come faster than the target, difficulty should increase
// relative to the constant-rate result.
target := config.BlockTarget
const numBlocks = 50
const actualInterval uint64 = 60 // half the target — blocks are too fast
@ -78,14 +74,24 @@ func TestNextDifficultyFastBlocks_Good(t *testing.T) {
cumulativeDiffs[i] = new(big.Int).Mul(baseDifficulty, big.NewInt(int64(i)))
}
result := NextDifficulty(timestamps, cumulativeDiffs, target)
if result.Cmp(baseDifficulty) <= 0 {
t.Errorf("expected difficulty > %s for fast blocks, got %s", baseDifficulty, result)
resultFast := NextDifficulty(timestamps, cumulativeDiffs, target)
// Now compute with on-target intervals for comparison.
timestampsTarget := make([]uint64, numBlocks)
for i := 0; i < numBlocks; i++ {
timestampsTarget[i] = uint64(i) * target
}
resultTarget := NextDifficulty(timestampsTarget, cumulativeDiffs, target)
if resultFast.Cmp(resultTarget) <= 0 {
t.Errorf("fast blocks (%s) should produce higher difficulty than target-rate blocks (%s)",
resultFast, resultTarget)
}
}
func TestNextDifficultySlowBlocks_Good(t *testing.T) {
// When blocks come slower than the target, difficulty should decrease.
// When blocks come slower than the target, difficulty should decrease
// relative to the constant-rate result.
target := config.BlockTarget
const numBlocks = 50
const actualInterval uint64 = 240 // double the target — blocks are too slow
@ -99,9 +105,18 @@ func TestNextDifficultySlowBlocks_Good(t *testing.T) {
cumulativeDiffs[i] = new(big.Int).Mul(baseDifficulty, big.NewInt(int64(i)))
}
result := NextDifficulty(timestamps, cumulativeDiffs, target)
if result.Cmp(baseDifficulty) >= 0 {
t.Errorf("expected difficulty < %s for slow blocks, got %s", baseDifficulty, result)
resultSlow := NextDifficulty(timestamps, cumulativeDiffs, target)
// Compute with on-target intervals for comparison.
timestampsTarget := make([]uint64, numBlocks)
for i := 0; i < numBlocks; i++ {
timestampsTarget[i] = uint64(i) * target
}
resultTarget := NextDifficulty(timestampsTarget, cumulativeDiffs, target)
if resultSlow.Cmp(resultTarget) >= 0 {
t.Errorf("slow blocks (%s) should produce lower difficulty than target-rate blocks (%s)",
resultSlow, resultTarget)
}
}
@ -128,4 +143,7 @@ func TestConstants_Good(t *testing.T) {
if BlocksCount != 735 {
t.Errorf("BlocksCount: got %d, want 735", BlocksCount)
}
if LWMAWindow != 60 {
t.Errorf("LWMAWindow: got %d, want 60", LWMAWindow)
}
}

View file

@ -56,7 +56,8 @@ type BlockDetails struct {
TotalFee uint64 `json:"total_fee"`
ID string `json:"id"`
PrevID string `json:"prev_id"`
Difficulty string `json:"difficulty"`
Difficulty string `json:"difficulty"`
CumulativeDiffPrecise string `json:"cumulative_diff_precise"`
Type uint64 `json:"type"`
IsOrphan bool `json:"is_orphan"`
CumulativeSize uint64 `json:"block_cumulative_size"`