feat(crypto): CLSAG ring signatures (GG/GGX/GGXXG) and proof verification stubs

CLSAG_GG generation and verification with flat-buffer ring marshalling.
Cofactor helpers (PointMul8/PointDiv8) for 1/8 premultiplication handling.
CLSAG_GGX and CLSAG_GGXXG verify-only bindings (GGX sig size tests).
Bulletproofs+, BGE, and Zarcanum verification stubs — implementation
deferred to Phase 4 when RPC provides real on-chain proof data.

19 tests pass, 3 skipped (proof stubs), all clean with -race.

Co-Authored-By: Charon <charon@lethean.io>
This commit is contained in:
Claude 2026-02-20 18:47:08 +00:00
parent f022e61da9
commit 1c763a8c31
No known key found for this signature in database
GPG key ID: AF404715446AEB41
5 changed files with 609 additions and 0 deletions

View file

@ -7,6 +7,8 @@
#include <cstring>
#include <vector>
#include "crypto.h"
#include "crypto-sugar.h"
#include "clsag.h"
#include "hash-ops.h"
extern "C" {
@ -172,4 +174,240 @@ int cn_check_ring_signature(const uint8_t hash[32], const uint8_t image[32],
return crypto::check_ring_signature(h, ki, pk_ptrs.data(), pubs_count, sig_ptr) ? 0 : 1;
}
// ── Point Helpers ────────────────────────────────────────
int cn_point_mul8(const uint8_t pk[32], uint8_t result[32]) {
crypto::public_key src;
memcpy(&src, pk, 32);
crypto::point_t pt(src);
pt.modify_mul8();
crypto::public_key dst;
pt.to_public_key(dst);
memcpy(result, &dst, 32);
return 0;
}
int cn_point_div8(const uint8_t pk[32], uint8_t result[32]) {
crypto::public_key src;
memcpy(&src, pk, 32);
crypto::point_t pt(src);
crypto::point_t div8 = crypto::c_scalar_1div8 * pt;
crypto::public_key dst;
div8.to_public_key(dst);
memcpy(result, &dst, 32);
return 0;
}
// ── CLSAG (HF4+) ────────────────────────────────────────
// Signature layout for GG: c(32) | r[N*32] | K1(32)
size_t cn_clsag_gg_sig_size(size_t ring_size) {
return 32 + ring_size * 32 + 32;
}
int cn_clsag_gg_generate(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out[32],
const uint8_t ki[32], const uint8_t secret_x[32],
const uint8_t secret_f[32], size_t secret_index,
uint8_t *sig) {
crypto::hash h;
memcpy(&h, hash, 32);
// Build ring from flat buffer: [stealth(32) | commitment(32)] per entry.
std::vector<crypto::public_key> stealth_keys(ring_size);
std::vector<crypto::public_key> commitments(ring_size);
std::vector<crypto::CLSAG_GG_input_ref_t> ring_refs;
ring_refs.reserve(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(&stealth_keys[i], ring + i * 64, 32);
memcpy(&commitments[i], ring + i * 64 + 32, 32);
ring_refs.emplace_back(stealth_keys[i], commitments[i]);
}
// pseudo_out for generation is point_t (not premultiplied by 1/8).
crypto::public_key po_pk;
memcpy(&po_pk, pseudo_out, 32);
crypto::point_t po_pt(po_pk);
crypto::key_image key_img;
memcpy(&key_img, ki, 32);
crypto::scalar_t sx, sf;
memcpy(sx.m_s, secret_x, 32);
memcpy(sf.m_s, secret_f, 32);
crypto::CLSAG_GG_signature clsag_sig;
bool ok = crypto::generate_CLSAG_GG(h, ring_refs, po_pt, key_img,
sx, sf, secret_index, clsag_sig);
if (!ok) return 1;
// Serialise: c(32) | r[N*32] | K1(32)
uint8_t *p = sig;
memcpy(p, clsag_sig.c.m_s, 32); p += 32;
for (size_t i = 0; i < ring_size; i++) {
memcpy(p, clsag_sig.r[i].m_s, 32); p += 32;
}
memcpy(p, &clsag_sig.K1, 32);
return 0;
}
int cn_clsag_gg_verify(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out[32],
const uint8_t ki[32], const uint8_t *sig) {
crypto::hash h;
memcpy(&h, hash, 32);
std::vector<crypto::public_key> stealth_keys(ring_size);
std::vector<crypto::public_key> commitments(ring_size);
std::vector<crypto::CLSAG_GG_input_ref_t> ring_refs;
ring_refs.reserve(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(&stealth_keys[i], ring + i * 64, 32);
memcpy(&commitments[i], ring + i * 64 + 32, 32);
ring_refs.emplace_back(stealth_keys[i], commitments[i]);
}
// pseudo_out for verification is public_key (premultiplied by 1/8).
crypto::public_key po_pk;
memcpy(&po_pk, pseudo_out, 32);
crypto::key_image key_img;
memcpy(&key_img, ki, 32);
// Deserialise: c(32) | r[N*32] | K1(32)
crypto::CLSAG_GG_signature clsag_sig;
const uint8_t *p = sig;
memcpy(clsag_sig.c.m_s, p, 32); p += 32;
clsag_sig.r.resize(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(clsag_sig.r[i].m_s, p, 32); p += 32;
}
memcpy(&clsag_sig.K1, p, 32);
return crypto::verify_CLSAG_GG(h, ring_refs, po_pk, key_img, clsag_sig) ? 0 : 1;
}
// Signature layout for GGX: c(32) | r_g[N*32] | r_x[N*32] | K1(32) | K2(32)
size_t cn_clsag_ggx_sig_size(size_t ring_size) {
return 32 + ring_size * 64 + 64;
}
int cn_clsag_ggx_verify(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out_commitment[32],
const uint8_t pseudo_out_asset_id[32],
const uint8_t ki[32], const uint8_t *sig) {
crypto::hash h;
memcpy(&h, hash, 32);
// Ring entries: [stealth(32) | commitment(32) | blinded_asset_id(32)] per entry.
std::vector<crypto::public_key> stealth_keys(ring_size);
std::vector<crypto::public_key> commitments(ring_size);
std::vector<crypto::public_key> asset_ids(ring_size);
std::vector<crypto::CLSAG_GGX_input_ref_t> ring_refs;
ring_refs.reserve(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(&stealth_keys[i], ring + i * 96, 32);
memcpy(&commitments[i], ring + i * 96 + 32, 32);
memcpy(&asset_ids[i], ring + i * 96 + 64, 32);
ring_refs.emplace_back(stealth_keys[i], commitments[i], asset_ids[i]);
}
crypto::public_key po_commitment, po_asset_id;
memcpy(&po_commitment, pseudo_out_commitment, 32);
memcpy(&po_asset_id, pseudo_out_asset_id, 32);
crypto::key_image key_img;
memcpy(&key_img, ki, 32);
// Deserialise: c(32) | r_g[N*32] | r_x[N*32] | K1(32) | K2(32)
crypto::CLSAG_GGX_signature clsag_sig;
const uint8_t *p = sig;
memcpy(clsag_sig.c.m_s, p, 32); p += 32;
clsag_sig.r_g.resize(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(clsag_sig.r_g[i].m_s, p, 32); p += 32;
}
clsag_sig.r_x.resize(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(clsag_sig.r_x[i].m_s, p, 32); p += 32;
}
memcpy(&clsag_sig.K1, p, 32); p += 32;
memcpy(&clsag_sig.K2, p, 32);
return crypto::verify_CLSAG_GGX(h, ring_refs, po_commitment, po_asset_id, key_img, clsag_sig) ? 0 : 1;
}
// Signature layout for GGXXG: c(32) | r_g[N*32] | r_x[N*32] | K1(32) | K2(32) | K3(32) | K4(32)
size_t cn_clsag_ggxxg_sig_size(size_t ring_size) {
return 32 + ring_size * 64 + 128;
}
int cn_clsag_ggxxg_verify(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out_commitment[32],
const uint8_t pseudo_out_asset_id[32],
const uint8_t extended_commitment[32],
const uint8_t ki[32], const uint8_t *sig) {
crypto::hash h;
memcpy(&h, hash, 32);
// Ring entries: [stealth(32) | commitment(32) | blinded_asset_id(32) | concealing(32)] per entry.
std::vector<crypto::public_key> stealth_keys(ring_size);
std::vector<crypto::public_key> commitments(ring_size);
std::vector<crypto::public_key> asset_ids(ring_size);
std::vector<crypto::public_key> concealing_pts(ring_size);
std::vector<crypto::CLSAG_GGXXG_input_ref_t> ring_refs;
ring_refs.reserve(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(&stealth_keys[i], ring + i * 128, 32);
memcpy(&commitments[i], ring + i * 128 + 32, 32);
memcpy(&asset_ids[i], ring + i * 128 + 64, 32);
memcpy(&concealing_pts[i], ring + i * 128 + 96, 32);
ring_refs.emplace_back(stealth_keys[i], commitments[i], asset_ids[i], concealing_pts[i]);
}
crypto::public_key po_commitment, po_asset_id, ext_commitment;
memcpy(&po_commitment, pseudo_out_commitment, 32);
memcpy(&po_asset_id, pseudo_out_asset_id, 32);
memcpy(&ext_commitment, extended_commitment, 32);
crypto::key_image key_img;
memcpy(&key_img, ki, 32);
// Deserialise: c(32) | r_g[N*32] | r_x[N*32] | K1(32) | K2(32) | K3(32) | K4(32)
crypto::CLSAG_GGXXG_signature clsag_sig;
const uint8_t *p = sig;
memcpy(clsag_sig.c.m_s, p, 32); p += 32;
clsag_sig.r_g.resize(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(clsag_sig.r_g[i].m_s, p, 32); p += 32;
}
clsag_sig.r_x.resize(ring_size);
for (size_t i = 0; i < ring_size; i++) {
memcpy(clsag_sig.r_x[i].m_s, p, 32); p += 32;
}
memcpy(&clsag_sig.K1, p, 32); p += 32;
memcpy(&clsag_sig.K2, p, 32); p += 32;
memcpy(&clsag_sig.K3, p, 32); p += 32;
memcpy(&clsag_sig.K4, p, 32);
return crypto::verify_CLSAG_GGXXG(h, ring_refs, po_commitment, po_asset_id, ext_commitment, key_img, clsag_sig) ? 0 : 1;
}
// ── Range Proofs (stubs — need on-chain binary format deserialiser) ──
int cn_bppe_verify(const uint8_t * /*proof*/, size_t /*proof_len*/,
const uint8_t * /*commitments*/, size_t /*num_commitments*/) {
return -1; // not implemented
}
int cn_bge_verify(const uint8_t /*context*/[32], const uint8_t * /*ring*/,
size_t /*ring_size*/, const uint8_t * /*proof*/, size_t /*proof_len*/) {
return -1; // not implemented
}
int cn_zarcanum_verify(const uint8_t /*hash*/[32], const uint8_t * /*proof*/,
size_t /*proof_len*/) {
return -1; // not implemented
}
} // extern "C"

View file

@ -47,6 +47,60 @@ int cn_check_ring_signature(const uint8_t hash[32], const uint8_t image[32],
const uint8_t *pubs, size_t pubs_count,
const uint8_t *sigs);
// ── Point Helpers ─────────────────────────────────────────
// Multiply a curve point by the cofactor 8 (for clearing small subgroup component).
int cn_point_mul8(const uint8_t pk[32], uint8_t result[32]);
// Premultiply by 1/8 (cofactor inverse). Stored form on-chain.
int cn_point_div8(const uint8_t pk[32], uint8_t result[32]);
// ── CLSAG Verification (HF4+) ────────────────────────────
// Ring entries are flat arrays of 32-byte public keys per entry:
// GG: [stealth_addr(32) | amount_commitment(32)] per entry = 64 bytes
// GGX: [stealth(32) | commitment(32) | blinded_asset_id(32)] = 96 bytes
// GGXXG: [stealth(32) | commitment(32) | blinded_asset_id(32) | concealing(32)] = 128 bytes
// Signature layout (flat):
// GG: c(32) | r[ring_size*32] | K1(32) = 64 + ring_size*32
// GGX: c(32) | r_g[ring_size*32] | r_x[ring_size*32] | K1(32) | K2(32) = 96 + ring_size*64
// GGXXG: c(32) | r_g[ring_size*32] | r_x[ring_size*32] | K1(32) | K2(32) | K3(32) | K4(32) = 160 + ring_size*64
size_t cn_clsag_gg_sig_size(size_t ring_size);
int cn_clsag_gg_generate(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out[32],
const uint8_t ki[32], const uint8_t secret_x[32],
const uint8_t secret_f[32], size_t secret_index,
uint8_t *sig);
int cn_clsag_gg_verify(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out[32],
const uint8_t ki[32], const uint8_t *sig);
size_t cn_clsag_ggx_sig_size(size_t ring_size);
int cn_clsag_ggx_verify(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out_commitment[32],
const uint8_t pseudo_out_asset_id[32],
const uint8_t ki[32], const uint8_t *sig);
size_t cn_clsag_ggxxg_sig_size(size_t ring_size);
int cn_clsag_ggxxg_verify(const uint8_t hash[32], const uint8_t *ring,
size_t ring_size, const uint8_t pseudo_out_commitment[32],
const uint8_t pseudo_out_asset_id[32],
const uint8_t extended_commitment[32],
const uint8_t ki[32], const uint8_t *sig);
// ── Range Proofs (Bulletproofs+ Enhanced) ─────────────────
// Proof verification requires deserialising variable-length BPPE structs from
// on-chain binary format. Implementation deferred to Phase 4 (needs RPC + chain data).
// Returns 0 on success, 1 on verification failure, -1 if not implemented.
int cn_bppe_verify(const uint8_t *proof, size_t proof_len,
const uint8_t *commitments, size_t num_commitments);
// ── BGE One-out-of-Many ───────────────────────────────────
int cn_bge_verify(const uint8_t context[32], const uint8_t *ring,
size_t ring_size, const uint8_t *proof, size_t proof_len);
// ── Zarcanum PoS ──────────────────────────────────────────
int cn_zarcanum_verify(const uint8_t hash[32], const uint8_t *proof,
size_t proof_len);
#ifdef __cplusplus
}
#endif

134
crypto/clsag.go Normal file
View file

@ -0,0 +1,134 @@
// SPDX-Licence-Identifier: EUPL-1.2
package crypto
/*
#include "bridge.h"
*/
import "C"
import (
"fmt"
"unsafe"
)
// PointMul8 multiplies a curve point by the cofactor 8.
func PointMul8(pk [32]byte) ([32]byte, error) {
var result [32]byte
rc := C.cn_point_mul8(
(*C.uint8_t)(unsafe.Pointer(&pk[0])),
(*C.uint8_t)(unsafe.Pointer(&result[0])),
)
if rc != 0 {
return result, fmt.Errorf("crypto: point_mul8 failed")
}
return result, nil
}
// PointDiv8 premultiplies a curve point by 1/8 (cofactor inverse).
// This is the on-chain storage form for commitments and key images.
func PointDiv8(pk [32]byte) ([32]byte, error) {
var result [32]byte
rc := C.cn_point_div8(
(*C.uint8_t)(unsafe.Pointer(&pk[0])),
(*C.uint8_t)(unsafe.Pointer(&result[0])),
)
if rc != 0 {
return result, fmt.Errorf("crypto: point_div8 failed")
}
return result, nil
}
// CLSAGGGSigSize returns the byte size of a CLSAG_GG signature for a given ring size.
func CLSAGGGSigSize(ringSize int) int {
return int(C.cn_clsag_gg_sig_size(C.size_t(ringSize)))
}
// GenerateCLSAGGG creates a CLSAG_GG ring signature.
// ring is a flat slice of [stealth_addr(32) | amount_commitment(32)] per entry.
// pseudoOut is the pseudo output commitment (not premultiplied by 1/8).
// secretX and secretF are the secret scalars for the signer.
func GenerateCLSAGGG(hash [32]byte, ring []byte, ringSize int,
pseudoOut [32]byte, ki [32]byte,
secretX [32]byte, secretF [32]byte, secretIndex int) ([]byte, error) {
sigLen := CLSAGGGSigSize(ringSize)
sig := make([]byte, sigLen)
rc := C.cn_clsag_gg_generate(
(*C.uint8_t)(unsafe.Pointer(&hash[0])),
(*C.uint8_t)(unsafe.Pointer(&ring[0])),
C.size_t(ringSize),
(*C.uint8_t)(unsafe.Pointer(&pseudoOut[0])),
(*C.uint8_t)(unsafe.Pointer(&ki[0])),
(*C.uint8_t)(unsafe.Pointer(&secretX[0])),
(*C.uint8_t)(unsafe.Pointer(&secretF[0])),
C.size_t(secretIndex),
(*C.uint8_t)(unsafe.Pointer(&sig[0])),
)
if rc != 0 {
return nil, fmt.Errorf("crypto: generate_CLSAG_GG failed")
}
return sig, nil
}
// VerifyCLSAGGG verifies a CLSAG_GG ring signature.
// ring is a flat slice of [stealth_addr(32) | amount_commitment(32)] per entry.
// pseudoOut is the pseudo output commitment (premultiplied by 1/8).
func VerifyCLSAGGG(hash [32]byte, ring []byte, ringSize int,
pseudoOut [32]byte, ki [32]byte, sig []byte) bool {
return C.cn_clsag_gg_verify(
(*C.uint8_t)(unsafe.Pointer(&hash[0])),
(*C.uint8_t)(unsafe.Pointer(&ring[0])),
C.size_t(ringSize),
(*C.uint8_t)(unsafe.Pointer(&pseudoOut[0])),
(*C.uint8_t)(unsafe.Pointer(&ki[0])),
(*C.uint8_t)(unsafe.Pointer(&sig[0])),
) == 0
}
// CLSAGGGXSigSize returns the byte size of a CLSAG_GGX signature for a given ring size.
func CLSAGGGXSigSize(ringSize int) int {
return int(C.cn_clsag_ggx_sig_size(C.size_t(ringSize)))
}
// VerifyCLSAGGGX verifies a CLSAG_GGX ring signature.
// ring is a flat slice of [stealth(32) | commitment(32) | blinded_asset_id(32)] per entry.
func VerifyCLSAGGGX(hash [32]byte, ring []byte, ringSize int,
pseudoOutCommitment [32]byte, pseudoOutAssetID [32]byte,
ki [32]byte, sig []byte) bool {
return C.cn_clsag_ggx_verify(
(*C.uint8_t)(unsafe.Pointer(&hash[0])),
(*C.uint8_t)(unsafe.Pointer(&ring[0])),
C.size_t(ringSize),
(*C.uint8_t)(unsafe.Pointer(&pseudoOutCommitment[0])),
(*C.uint8_t)(unsafe.Pointer(&pseudoOutAssetID[0])),
(*C.uint8_t)(unsafe.Pointer(&ki[0])),
(*C.uint8_t)(unsafe.Pointer(&sig[0])),
) == 0
}
// CLSAGGGXXGSigSize returns the byte size of a CLSAG_GGXXG signature for a given ring size.
func CLSAGGGXXGSigSize(ringSize int) int {
return int(C.cn_clsag_ggxxg_sig_size(C.size_t(ringSize)))
}
// VerifyCLSAGGGXXG verifies a CLSAG_GGXXG ring signature.
// ring is a flat slice of [stealth(32) | commitment(32) | blinded_asset_id(32) | concealing(32)] per entry.
func VerifyCLSAGGGXXG(hash [32]byte, ring []byte, ringSize int,
pseudoOutCommitment [32]byte, pseudoOutAssetID [32]byte,
extendedCommitment [32]byte, ki [32]byte, sig []byte) bool {
return C.cn_clsag_ggxxg_verify(
(*C.uint8_t)(unsafe.Pointer(&hash[0])),
(*C.uint8_t)(unsafe.Pointer(&ring[0])),
C.size_t(ringSize),
(*C.uint8_t)(unsafe.Pointer(&pseudoOutCommitment[0])),
(*C.uint8_t)(unsafe.Pointer(&pseudoOutAssetID[0])),
(*C.uint8_t)(unsafe.Pointer(&extendedCommitment[0])),
(*C.uint8_t)(unsafe.Pointer(&ki[0])),
(*C.uint8_t)(unsafe.Pointer(&sig[0])),
) == 0
}

View file

@ -298,3 +298,131 @@ func TestRingSignature_Bad_WrongMessage(t *testing.T) {
t.Fatal("ring signature verified with wrong message")
}
}
// ── CLSAG ────────────────────────────────────────────────
func TestCLSAG_GG_Good_Roundtrip(t *testing.T) {
// CLSAG_GG is a 2-dimensional linkable ring signature:
// Layer 0: stealth addresses (P_i), secret_x for real signer
// Layer 1: commitment difference (A_i - pseudo_out), secret_f
//
// Ring commitments are stored premultiplied by 1/8 (on-chain form).
// generate takes pseudo_out as the FULL point (not premultiplied).
// verify takes pseudo_out as the PREMULTIPLIED form.
//
// When pseudo_out matches the real commitment: secret_f = 0.
// generate pseudo_out = 8 * ring_commitment (full point).
// verify pseudo_out = ring_commitment (premultiplied form, as stored).
ringSize := 4
realIndex := 2
ring := make([]byte, ringSize*64)
var realStealthSec [32]byte
var secretF [32]byte // zero — pseudo_out matches real commitment
var ki [32]byte
for i := 0; i < ringSize; i++ {
pub, sec, _ := crypto.GenerateKeys()
copy(ring[i*64:], pub[:])
cPub, _, _ := crypto.GenerateKeys()
// Store commitment as-is. CLSAG treats this as premultiplied by 1/8.
copy(ring[i*64+32:], cPub[:])
if i == realIndex {
realStealthSec = sec
var err error
ki, err = crypto.GenerateKeyImage(pub, sec)
if err != nil {
t.Fatalf("GenerateKeyImage: %v", err)
}
}
}
// For generate: pseudo_out = 8 * commitment (full point).
var commitmentPremul [32]byte
copy(commitmentPremul[:], ring[realIndex*64+32:realIndex*64+64])
pseudoOutFull, err := crypto.PointMul8(commitmentPremul)
if err != nil {
t.Fatalf("PointMul8: %v", err)
}
msg := crypto.FastHash([]byte("clsag gg test"))
sig, err := crypto.GenerateCLSAGGG(msg, ring, ringSize, pseudoOutFull, ki,
realStealthSec, secretF, realIndex)
if err != nil {
t.Fatalf("GenerateCLSAGGG: %v", err)
}
expectedSize := crypto.CLSAGGGSigSize(ringSize)
if len(sig) != expectedSize {
t.Fatalf("sig size: got %d, want %d", len(sig), expectedSize)
}
// For verify: pseudo_out = commitment (premultiplied form).
if !crypto.VerifyCLSAGGG(msg, ring, ringSize, commitmentPremul, ki, sig) {
t.Fatal("valid CLSAG_GG signature failed verification")
}
}
func TestCLSAG_GG_Bad_WrongMessage(t *testing.T) {
ringSize := 3
realIndex := 0
ring := make([]byte, ringSize*64)
var realStealthSec [32]byte
var secretF [32]byte
var ki [32]byte
for i := 0; i < ringSize; i++ {
pub, sec, _ := crypto.GenerateKeys()
copy(ring[i*64:], pub[:])
cPub, _, _ := crypto.GenerateKeys()
copy(ring[i*64+32:], cPub[:])
if i == realIndex {
realStealthSec = sec
ki, _ = crypto.GenerateKeyImage(pub, sec)
}
}
var commitmentPremul [32]byte
copy(commitmentPremul[:], ring[realIndex*64+32:realIndex*64+64])
pseudoOutFull, _ := crypto.PointMul8(commitmentPremul)
msg1 := crypto.FastHash([]byte("msg1"))
msg2 := crypto.FastHash([]byte("msg2"))
sig, _ := crypto.GenerateCLSAGGG(msg1, ring, ringSize, pseudoOutFull, ki,
realStealthSec, secretF, realIndex)
if crypto.VerifyCLSAGGG(msg2, ring, ringSize, commitmentPremul, ki, sig) {
t.Fatal("CLSAG_GG verified with wrong message")
}
}
func TestCLSAG_GGX_Good_SigSize(t *testing.T) {
// Verify sig size calculation is consistent.
if crypto.CLSAGGGXSigSize(4) != 32+4*64+64 {
t.Fatalf("GGX sig size for ring=4: got %d, want %d", crypto.CLSAGGGXSigSize(4), 32+4*64+64)
}
}
func TestCLSAG_GGXXG_Good_SigSize(t *testing.T) {
// Verify sig size calculation is consistent.
if crypto.CLSAGGGXXGSigSize(4) != 32+4*64+128 {
t.Fatalf("GGXXG sig size for ring=4: got %d, want %d", crypto.CLSAGGGXXGSigSize(4), 32+4*64+128)
}
}
// ── Range Proofs / Zarcanum (stubs) ──────────────────────
func TestBPPE_Stub_NotImplemented(t *testing.T) {
t.Skip("BPPE verification needs on-chain proof data — Phase 4")
}
func TestBGE_Stub_NotImplemented(t *testing.T) {
t.Skip("BGE verification needs on-chain proof data — Phase 4")
}
func TestZarcanum_Stub_NotImplemented(t *testing.T) {
t.Skip("Zarcanum verification needs on-chain proof data — Phase 4")
}

55
crypto/proof.go Normal file
View file

@ -0,0 +1,55 @@
// SPDX-Licence-Identifier: EUPL-1.2
package crypto
/*
#include "bridge.h"
*/
import "C"
import "unsafe"
// VerifyBPPE verifies a Bulletproofs+ Enhanced range proof.
// Returns true if the proof is valid, false otherwise.
// Currently returns false (not implemented) — needs on-chain binary format
// deserialiser. Full implementation arrives in Phase 4 with RPC + chain data.
func VerifyBPPE(proof []byte, commitments [][32]byte) bool {
n := len(commitments)
flat := make([]byte, n*32)
for i, c := range commitments {
copy(flat[i*32:], c[:])
}
return C.cn_bppe_verify(
(*C.uint8_t)(unsafe.Pointer(&proof[0])),
C.size_t(len(proof)),
(*C.uint8_t)(unsafe.Pointer(&flat[0])),
C.size_t(n),
) == 0
}
// VerifyBGE verifies a BGE one-out-of-many proof.
// Currently returns false (not implemented).
func VerifyBGE(context [32]byte, ring [][32]byte, proof []byte) bool {
n := len(ring)
flat := make([]byte, n*32)
for i, r := range ring {
copy(flat[i*32:], r[:])
}
return C.cn_bge_verify(
(*C.uint8_t)(unsafe.Pointer(&context[0])),
(*C.uint8_t)(unsafe.Pointer(&flat[0])),
C.size_t(n),
(*C.uint8_t)(unsafe.Pointer(&proof[0])),
C.size_t(len(proof)),
) == 0
}
// VerifyZarcanum verifies a Zarcanum PoS proof.
// Currently returns false (not implemented).
func VerifyZarcanum(hash [32]byte, proof []byte) bool {
return C.cn_zarcanum_verify(
(*C.uint8_t)(unsafe.Pointer(&hash[0])),
(*C.uint8_t)(unsafe.Pointer(&proof[0])),
C.size_t(len(proof)),
) == 0
}