forked from Snider/Poindexter
Compare commits
No commits in common. "main" and "gh-pages" have entirely different histories.
151 changed files with 17948 additions and 13748 deletions
.github/workflows/ci.yml (vendored, 178 lines deleted)
@@ -1,178 +0,0 @@
name: CI

on:
  push:
    branches: ["**"]
  pull_request:
    branches: ["**"]

permissions:
  contents: read
  actions: write

jobs:
  build-test-wasm:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.23.x'

      - name: Install extra tools
        run: |
          go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
          go install golang.org/x/vuln/cmd/govulncheck@latest

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Go env
        run: go env

      - name: CI checks (lint, tests, coverage, etc.)
        run: make ci

      - name: Benchmarks (linear)
        run: go test -bench . -benchmem -run=^$ ./... | tee bench-linear.txt

      - name: Coverage summary (linear)
        run: |
          if [ -f coverage.out ]; then
            go tool cover -func=coverage.out > coverage-summary.md;
          else
            echo "coverage.out not found" > coverage-summary.md;
          fi

      - name: Upload coverage summary (linear)
        uses: actions/upload-artifact@v4
        with:
          name: coverage-summary
          path: coverage-summary.md
          if-no-files-found: error

      - name: Upload benchmarks (linear)
        uses: actions/upload-artifact@v4
        with:
          name: bench-linear
          path: bench-linear.txt
          if-no-files-found: error

      - name: Build WebAssembly module
        run: make wasm-build

      - name: Prepare npm package folder
        run: make npm-pack

      - name: WASM smoke test (Node)
        run: node npm/poindexter-wasm/smoke.mjs

      - name: Create npm tarball
        id: npm_pack
        run: |
          echo "tarball=$(npm pack ./npm/poindexter-wasm)" >> "$GITHUB_OUTPUT"

      - name: Upload dist (WASM artifacts)
        uses: actions/upload-artifact@v4
        with:
          name: poindexter-wasm-dist
          if-no-files-found: error
          path: |
            dist/poindexter.wasm
            dist/wasm_exec.js

      - name: Upload npm package folder
        uses: actions/upload-artifact@v4
        with:
          name: npm-poindexter-wasm
          if-no-files-found: error
          path: npm/poindexter-wasm/**

      - name: Upload npm tarball
        uses: actions/upload-artifact@v4
        with:
          name: npm-poindexter-wasm-tarball
          if-no-files-found: error
          path: ${{ steps.npm_pack.outputs.tarball }}

  build-test-gonum:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.23.x'

      - name: Install extra tools
        run: |
          go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
          go install golang.org/x/vuln/cmd/govulncheck@latest

      - name: Go env
        run: go env

      - name: Lint
        run: golangci-lint run

      - name: Build (gonum tag)
        run: go build -tags=gonum ./...

      - name: Unit tests + race + coverage (gonum tag)
        run: go test -tags=gonum -race -coverpkg=./... -coverprofile=coverage-gonum.out -covermode=atomic ./...

      - name: Fuzz (10s per fuzz test, gonum tag)
        run: |
          set -e
          for pkg in $(go list ./...); do
            FUZZES=$(go test -tags=gonum -list '^Fuzz' "$pkg" | grep '^Fuzz' || true)
            if [ -z "$FUZZES" ]; then
              echo "==> Skipping $pkg (no fuzz targets)"
              continue
            fi
            for fz in $FUZZES; do
              echo "==> Fuzzing $pkg :: $fz for 10s"
              go test -tags=gonum -run=NONE -fuzz=^${fz}$ -fuzztime=10s "$pkg"
            done
          done

      - name: Benchmarks (gonum tag)
        run: go test -tags=gonum -bench . -benchmem -run=^$ ./... | tee bench-gonum.txt

      - name: Upload coverage (gonum)
        uses: actions/upload-artifact@v4
        with:
          name: coverage-gonum
          path: coverage-gonum.out
          if-no-files-found: error

      - name: Coverage summary (gonum)
        run: |
          if [ -f coverage-gonum.out ]; then
            go tool cover -func=coverage-gonum.out > coverage-summary-gonum.md;
          else
            echo "coverage-gonum.out not found" > coverage-summary-gonum.md;
          fi

      - name: Upload coverage summary (gonum)
        uses: actions/upload-artifact@v4
        with:
          name: coverage-summary-gonum
          path: coverage-summary-gonum.md
          if-no-files-found: error

      - name: Upload benchmarks (gonum)
        uses: actions/upload-artifact@v4
        with:
          name: bench-gonum
          path: bench-gonum.txt
          if-no-files-found: error

.github/workflows/docs.yml (vendored, 30 lines deleted)
@@ -1,30 +0,0 @@
name: Deploy Documentation

on:
  push:
    branches:
      - main
  workflow_dispatch:

permissions:
  contents: write

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.x

      - name: Install dependencies
        run: |
          pip install mkdocs-material

      - name: Deploy to GitHub Pages
        run: |
          mkdocs gh-deploy --force

.github/workflows/release.yml (vendored, 44 lines deleted)
@@ -1,44 +0,0 @@
name: Release

on:
  push:
    tags:
      - 'v*'

permissions:
  contents: write

jobs:
  release:
    name: Goreleaser publish
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # needed for changelog and tags

      - name: Setup Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.23.x'
          cache: true

      - name: Tidy check
        run: |
          go mod tidy
          git diff --exit-code -- go.mod go.sum

      - name: Build
        run: go build ./...

      - name: Test (race)
        run: go test -race ./...

      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v6
        with:
          version: latest
          args: release --clean --config .goreleaser.yaml
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/test.yml (vendored, 33 lines deleted)
@@ -1,33 +0,0 @@
name: Tests

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

permissions:
  contents: read

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: stable

      - name: Run tests
        run: go test -v -race -coverprofile=coverage.txt -covermode=atomic ./...

      - name: Upload coverage
        uses: codecov/codecov-action@v4
        with:
          files: ./coverage.txt
          fail_ci_if_error: false

.gitignore (vendored, 59 lines deleted)
@@ -1,59 +0,0 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib

# Test binary, built with `go test -c`
*.test

# Output of the go coverage tool, specifically when used with LiteIDE
*.out
bench.txt
coverage.html
# Dependency directories (remove the comment below to include it)
vendor/

# Go workspace file
go.work

# Build output
dist/
build/

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
env/
venv/
ENV/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# MkDocs
site/

# IDEs
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store

.golangci.yml (20 lines deleted)
@@ -1,20 +0,0 @@
version: "2"
run:
  timeout: 5m
linters:
  enable:
    - govet
    - staticcheck
    - ineffassign
    - misspell
    - errcheck
issues:
  max-issues-per-linter: 0
  max-same-issues: 0
  exclude-rules:
    - path: _test\.go
      linters:
        - errcheck
linters-settings:
  errcheck:
    # Using default settings; test file exclusions are handled by exclude-rules above.

.goreleaser.yaml (45 lines deleted)
@@ -1,45 +0,0 @@
# GoReleaser config for Poindexter (library)
# This configuration focuses on generating GitHub Releases with changelog notes.
# Since Poindexter is a library (no CLI binaries), we skip building archives.

project_name: poindexter

dist: dist

before:
  hooks:
    - go mod tidy

builds: [] # no binaries to build for this library

dockers: []

archives: [] # do not produce tarballs/zip since there are no binaries

checksum:
  name_template: "checksums.txt"

changelog:
  use: github
  sort: asc
  filters:
    exclude:
      - '^docs:'
      - '^chore:'
      - '^test:'
      - '^ci:'
      - 'README'

release:
  prerelease: false
  draft: false
  mode: replace
  footer: |
    --
    Generated by GoReleaser. See CHANGELOG.md for curated notes.

report_sizes: true

# Snapshot configuration for non-tag builds (optional local use)
snapshot:
  name_template: "{{ incpatch .Version }}-next+{{ .ShortCommit }}"

GoReleaser config mirror (49 lines deleted)
@@ -1,49 +0,0 @@
# NOTE: This file is intentionally a mirror of .goreleaser.yaml.
# Canonical configuration lives in .goreleaser.yaml; keep both in sync.
# CI release workflow explicitly uses .goreleaser.yaml to avoid ambiguity.

# GoReleaser config for Poindexter (library)
# This configuration focuses on generating GitHub Releases with changelog notes.
# Since Poindexter is a library (no CLI binaries), we skip building archives.

project_name: poindexter

dist: dist

before:
  hooks:
    - go mod tidy

builds: [] # no binaries to build for this library

dockers: []

archives: [] # do not produce tarballs/zip since there are no binaries

checksum:
  name_template: "checksums.txt"

changelog:
  use: github
  sort: asc
  filters:
    exclude:
      - '^docs:'
      - '^chore:'
      - '^test:'
      - '^ci:'
      - 'README'

release:
  prerelease: false
  draft: false
  mode: replace
  footer: |
    --
    Generated by GoReleaser. See CHANGELOG.md for curated notes.

report_sizes: true

# Snapshot configuration for non-tag builds (optional local use)
snapshot:
  name_template: "{{ incpatch .Version }}-next+{{ .ShortCommit }}"

.nojekyll (new file, empty)

404.html (new file, 755 lines added)
@@ -0,0 +1,755 @@
[Generated MkDocs Material page (mkdocs 1.6.1, mkdocs-material 9.7.1): the documentation site's "404 - Not found" page for Poindexter. It contains the standard theme chrome: a site header with search and a light/dark palette toggle, a source link to the Snider/Poindexter repository, navigation tabs for Home, Getting Started, WebAssembly (Browser), Examples (Best Ping Peer (DHT), Multi-Dimensional KDTree (DHT)), API Reference, Performance, and License, a "404 - Not found" article body, and a footer with "Copyright © 2025 Snider" and "Made with Material for MkDocs", plus the theme's bundled stylesheets and JavaScript. The full generated markup is not reproduced here.]

CHANGELOG.md (66 lines deleted)
@@ -1,66 +0,0 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on Keep a Changelog and this project adheres to Semantic Versioning.

## [Unreleased]
### Added
- Dual-backend benchmarks (Linear vs Gonum) with deterministic datasets (uniform/clustered) in 2D/4D for N=1k/10k; artifacts uploaded in CI as `bench-linear.txt` and `bench-gonum.txt`.
- Documentation: Performance guide updated to cover backend selection, how to run both backends, CI artifact links, and guidance on when each backend is preferred.
- Documentation: Performance guide now includes a Sample results table sourced from a recent local run.
- Documentation: README gained a “Backend selection” section with default behavior, build tag usage, overrides, and supported metrics notes.
- Documentation: API reference (`docs/api.md`) now documents `KDBackend`, `WithBackend`, default selection, and supported metrics for the optimized backend.
- Examples: Added `examples/wasm-browser/` minimal browser demo (ESM + HTML) for the WASM build.
- pkg.go.dev Examples: `ExampleNewKDTreeFromDim_Insert`, `ExampleKDTree_TiesBehavior`, `ExampleKDTree_Radius_none`.
- Lint: enable `errcheck` in `.golangci.yml` with test-file exclusion to reduce noise.
- CI: enable module cache in `actions/setup-go` to speed up workflows.

### Fixed
- go vet failures in examples due to misnamed `Example*` functions; renamed to avoid referencing non-existent methods and identifiers.
- Stabilized `ExampleKDTree_Nearest` to avoid a tie case; adjusted query and expected output.
- Relaxed floating-point equality in `TestWeightedCosineDistance_Basics` to use an epsilon, avoiding spurious failures on some toolchains.

## [0.3.0] - 2025-11-03
### Added
- New distance metrics: `CosineDistance` and `WeightedCosineDistance` (1 - cosine similarity), with robust zero-vector handling and bounds.
- N-D normalization helpers: `ComputeNormStatsND`, `BuildND`, `BuildNDWithStats` for arbitrary dimensions, with validation errors (`ErrInvalidFeatures`, `ErrInvalidWeights`, `ErrInvalidInvert`, `ErrStatsDimMismatch`).
- Tests: unit tests for cosine/weighted-cosine metrics; parity tests between `Build4D` and `BuildND`; error-path tests; extended fuzz to include cosine metrics.
- pkg.go.dev examples: `ExampleBuildND`, `ExampleBuildNDWithStats`, `ExampleCosineDistance`.

### Changed
- Version bumped to `0.3.0`.
- README: list Cosine among supported metrics.

## [0.2.1] - 2025-11-03
### Added
- Normalization stats helpers: `AxisStats`, `NormStats`, `ComputeNormStats2D/3D/4D`.
- Builders that reuse stats: `Build2DWithStats`, `Build3DWithStats`, `Build4DWithStats`.
- CI: coverage integration (`-coverprofile`), Codecov upload and badge.
- CI: benchmark runs publish artifacts per Go version.
- Docs: Performance page (`docs/perf.md`) and MkDocs nav entry.
- pkg.go.dev examples: `ExampleBuild2DWithStats`, `ExampleBuild4DWithStats`.
- Tests for stats parity, min==max safety, and dynamic update with reused stats.
- Docs: API reference section “KDTree Normalization Stats (reuse across updates)”; updated multi-dimensional docs with WithStats snippet.

### Changed
- Bumped version to `0.2.1`.

### Previously added in Unreleased
- README badges (pkg.go.dev, CI, Go Report Card, govulncheck) and KDTree performance/concurrency notes.
- Examples directory with runnable programs: 1D ping, 2D ping+hop, 3D ping+hop+geo, 4D ping+hop+geo+score.
- CI workflow (Go 1.22/1.23): tidy check, build, vet, test -race, build examples, govulncheck, golangci-lint.
- Lint configuration (.golangci.yml) with a pragmatic ruleset.
- Contributor docs: CONTRIBUTING.md, CODE_OF_CONDUCT.md, SECURITY.md.
- pkg.go.dev example functions for KDTree usage and helpers.
- Fuzz tests and benchmarks for KDTree (Nearest/KNearest/Radius and metrics).

## [0.2.0] - 2025-10-??
### Added
- KDTree public API with generic payloads and helper builders (Build2D/3D/4D).
- Docs pages for DHT examples and multi-dimensional KDTree usage.

[Unreleased]: https://github.com/Snider/Poindexter/compare/v0.3.0...HEAD
[0.3.0]: https://github.com/Snider/Poindexter/releases/tag/v0.3.0
[0.2.1]: https://github.com/Snider/Poindexter/releases/tag/v0.2.1
[0.2.0]: https://github.com/Snider/Poindexter/releases/tag/v0.2.0

CLAUDE.md (166 lines deleted)
@@ -1,166 +0,0 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Project Overview

Poindexter is a Go library providing:
- **Sorting utilities** with custom comparators (ints, strings, floats, generic types)
- **KDTree** for nearest-neighbor search with multiple distance metrics (Euclidean, Manhattan, Chebyshev, Cosine)
- **Helper functions** for building normalized/weighted KD points (2D/3D/4D/ND)
- **DNS/RDAP tools** for domain, IP, and ASN lookups
- **Analytics** for tree operations and peer selection tracking

## Build Commands

```bash
# Run all tests
make test

# Run tests with race detector
make race

# Run tests with coverage (writes coverage.out)
make cover

# Run a single test
go test -run TestFunctionName ./...

# Build all packages
make build

# Build with gonum optimized backend
go build -tags=gonum ./...

# Run tests with gonum backend
go test -tags=gonum ./...

# Build WebAssembly module
make wasm-build

# Run benchmarks (writes bench.txt)
make bench

# Run benchmarks for specific backend
make bench-linear   # Linear backend
make bench-gonum    # Gonum backend (requires -tags=gonum)

# Fuzz testing (default 10s)
make fuzz
make fuzz FUZZTIME=30s

# Lint and static analysis
make lint   # requires golangci-lint
make vuln   # requires govulncheck

# CI-parity local run
make ci

# Format code
make fmt

# Tidy modules
make tidy
```

## Architecture

### Core Components

**kdtree.go** - Main KDTree implementation:
- `KDTree[T]` generic struct with payload type T
- `KDPoint[T]` represents points with ID, Coords, and Value
- Query methods: `Nearest()`, `KNearest()`, `Radius()` (see the sketch after this list)
- Mutation methods: `Insert()`, `DeleteByID()`
- Distance metrics implement the `DistanceMetric` interface
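
A minimal end-to-end sketch of these query and mutation calls follows. The method names come from this file; the return shapes (`Nearest` yielding a point plus an ok flag, `KNearest`/`Radius` yielding slices) are assumptions for illustration, not confirmed signatures.

```go
// Illustrative sketch; the return shapes of the query methods are assumed.
pts := []poindexter.KDPoint[string]{
    {ID: "A", Coords: []float64{0, 0}, Value: "alpha"},
    {ID: "B", Coords: []float64{3, 4}, Value: "beta"},
}
tree, err := poindexter.NewKDTree(pts)
if err != nil {
    log.Fatal(err)
}

best, ok := tree.Nearest([]float64{1, 1})   // assumed (point, ok)
top2 := tree.KNearest([]float64{1, 1}, 2)   // assumed slice of points
within := tree.Radius([]float64{1, 1}, 2.5) // assumed slice of points
fmt.Println(best.ID, ok, len(top2), len(within))

// Dynamic updates.
tree.Insert(poindexter.KDPoint[string]{ID: "C", Coords: []float64{1, 2}, Value: "gamma"})
tree.DeleteByID("A")
```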
**Dual Backend System:**
- `kdtree_gonum.go` (build tag `gonum`) - Optimized median-split KD-tree with branch-and-bound pruning
- `kdtree_gonum_stub.go` (default) - Stubs when the gonum tag is not set
- The linear backend is always available; the gonum backend is the default when the tag is enabled
- Backend selected via the `WithBackend()` option, or defaults based on build tags (see the snippet after this list)
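
Forcing a backend explicitly uses the same options pattern as the construction example under Key Patterns; `BackendGonum` appears there, while `BackendLinear` below is an assumed constant name for the always-available linear backend.

```go
// Force the linear backend regardless of build tags.
// BackendLinear is an assumed name; only BackendGonum is shown elsewhere in this file.
tree, err := poindexter.NewKDTree(pts,
    poindexter.WithBackend(poindexter.BackendLinear))
```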
**kdtree_helpers.go** - Point construction utilities:
- `BuildND()`, `Build2D()`, `Build3D()`, `Build4D()` - Build normalized/weighted KD points
- `ComputeNormStatsND()` - Compute per-axis min/max for normalization
- Supports axis inversion and per-axis weights

**kdtree_analytics.go** - Operational metrics:
- `TreeAnalytics` - Query/insert/delete counts, timing stats
- `PeerAnalytics` - Per-peer selection tracking for DHT/NAT routing
- Distribution statistics and NAT routing metrics
**sort.go** - Sorting utilities:
- Type-specific sorts: `SortInts()`, `SortStrings()`, `SortFloat64s()`
- Generic sorts: `SortBy()`, `SortByKey()`
- Binary search: `BinarySearch()`, `BinarySearchStrings()` (see the sketch after this list)
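
A hedged sketch of the sorting and search helpers; the function names are from the list above, while the signatures (in-place sorts, a less-func for `SortBy`, `(index, found)` from `BinarySearch`) are assumptions.

```go
// Assumed signatures, for illustration only.
nums := []int{5, 2, 9, 1}
poindexter.SortInts(nums) // in place: [1 2 5 9]

type peer struct {
    ID   string
    Ping float64
}
peers := []peer{{ID: "A", Ping: 42}, {ID: "B", Ping: 7}}
poindexter.SortBy(peers, func(a, b peer) bool { return a.Ping < b.Ping })

idx, found := poindexter.BinarySearch(nums, 9)
fmt.Println(idx, found)
```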
**dns_tools.go** - DNS and RDAP lookup utilities:
- DNS record types and lookup functions
- RDAP (modern WHOIS) for domains, IPs, and ASNs
- External tool link generators

### Examples Directory

Runnable examples demonstrating KDTree usage patterns:
- `examples/dht_ping_1d/` - 1D DHT with ping latency
- `examples/kdtree_2d_ping_hop/` - 2D with ping + hop count
- `examples/kdtree_3d_ping_hop_geo/` - 3D with geographic distance
- `examples/kdtree_4d_ping_hop_geo_score/` - 4D with trust scores
- `examples/dht_helpers/` - Convenience wrappers for common DHT schemas
- `examples/wasm-browser-ts/` - TypeScript + Vite browser demo

### WebAssembly

The WASM module in `wasm/main.go` exposes KDTree functionality to JavaScript. Build output goes to `dist/`.
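
`wasm/main.go` itself is not part of this comparison; the sketch below only illustrates the usual `syscall/js` pattern such a module follows, with a hypothetical exported lookup. The real export names may differ.

```go
//go:build js && wasm

// Hypothetical illustration of the syscall/js export pattern, not the
// repository's actual wasm/main.go.
package main

import "syscall/js"

func main() {
    js.Global().Set("poindexterNearest", js.FuncOf(func(this js.Value, args []js.Value) any {
        x, y := args[0].Float(), args[1].Float()
        _, _ = x, y
        // ...query a KDTree built on the Go side and return a plain JS object...
        return js.ValueOf(map[string]any{"id": "A"})
    }))
    select {} // keep the runtime alive so the exported function stays callable
}
```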
## Key Patterns

### KDTree Construction
```go
pts := []poindexter.KDPoint[string]{
    {ID: "A", Coords: []float64{0, 0}, Value: "alpha"},
}
tree, err := poindexter.NewKDTree(pts,
    poindexter.WithMetric(poindexter.EuclideanDistance{}),
    poindexter.WithBackend(poindexter.BackendGonum))
```

### Building Normalized Points
```go
pts, err := poindexter.BuildND(items,
    func(p Peer) string { return p.ID },    // ID extractor
    []func(Peer) float64{getPing, getHops}, // Feature extractors
    []float64{1.0, 0.5},                    // Per-axis weights
    []bool{false, false})                   // Inversion flags
```
### Distance Metrics
- `EuclideanDistance{}` - L2 norm (default)
- `ManhattanDistance{}` - L1 norm
- `ChebyshevDistance{}` - L-infinity (max) norm
- `CosineDistance{}` - 1 - cosine similarity
- `WeightedCosineDistance{Weights: []float64{...}}` - Weighted cosine

Note: Cosine metrics use the linear backend only; L2/L1/L-infinity work with the gonum backend.
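
Selecting a non-default metric follows the same options pattern as the construction example above; for instance, a weighted cosine metric (which, per the note, runs on the linear backend):

```go
// WeightedCosineDistance: 1 - weighted cosine similarity, with one weight per axis.
tree, err := poindexter.NewKDTree(pts,
    poindexter.WithMetric(poindexter.WeightedCosineDistance{Weights: []float64{1.0, 0.5}}))
```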
## Testing

Tests are organized by component:
- `kdtree_test.go`, `kdtree_nd_test.go` - Core KDTree tests
- `kdtree_gonum_test.go` - Gonum backend specific tests
- `kdtree_backend_parity_test.go` - Backend equivalence tests
- `fuzz_kdtree_test.go` - Fuzz targets
- `bench_*.go` - Benchmark suites

Coverage targets:
```bash
make cover          # Summary
make coverfunc      # Per-function breakdown
make cover-kdtree   # kdtree.go only
make coverhtml      # HTML report
```

## License

EUPL-1.2 (European Union Public Licence v1.2)

CODE_OF_CONDUCT.md (9 lines deleted)
@@ -1,9 +0,0 @@
# Code of Conduct

This project has adopted the Contributor Covenant Code of Conduct.

- Version: [Contributor Covenant v2.1](https://www.contributor-covenant.org/version/2/1/code_of_conduct/)
- FAQ: [Contributor Covenant FAQ](https://www.contributor-covenant.org/faq)
- Translations: [Available Translations](https://www.contributor-covenant.org/translations)

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the maintainers via GitHub issues or by the email listed on the repository profile. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.

CONTRIBUTING.md (79 lines deleted)
@@ -1,79 +0,0 @@
# Contributing to Poindexter

Thanks for your interest in contributing! This document describes how to build, test, lint, and propose changes.

## Getting started

- Go 1.23+
- `git clone https://github.com/Snider/Poindexter`
- `cd Poindexter`

## Build and test

- Tidy deps: `go mod tidy`
- Build: `go build ./...`
- Run tests: `go test ./...`
- Run race tests: `go test -race ./...`
- Run examples: `go run ./examples/...`

## Lint and vet

We use golangci-lint in CI. To run locally:

```
# Install once
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin

# Run
golangci-lint run
```

Also run `go vet ./...` periodically.
## Fuzzing and benchmarks

- Fuzz (manually): `go test -run=NONE -fuzz=Fuzz -fuzztime=10s` (a minimal fuzz target is sketched below)
- Benchmarks: `go test -bench=. -benchmem`
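
The repository's fuzz targets live in files like `fuzz_kdtree_test.go`. As a shape reference only, a minimal Go fuzz target looks like the hypothetical one below; it is not one of the repository's actual targets.

```go
// In a _test.go file; hypothetical target shown only to illustrate the shape.
func FuzzNearestNeverPanics(f *testing.F) {
    f.Add(0.5, 0.5)
    f.Fuzz(func(t *testing.T, x, y float64) {
        pts := []poindexter.KDPoint[string]{{ID: "A", Coords: []float64{0, 0}, Value: "a"}}
        tree, err := poindexter.NewKDTree(pts)
        if err != nil {
            t.Fatal(err)
        }
        tree.Nearest([]float64{x, y}) // must not panic for any input
    })
}
```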
## Pull requests

- Create a branch from `main`.
- Ensure `go mod tidy` produces no changes.
- Ensure `go test -race ./...` passes.
- Ensure `golangci-lint run` has no issues.
- Update CHANGELOG.md (Unreleased section) with a brief summary.

## Coding style

- Follow standard Go formatting and idioms.
- Public APIs must have doc comments starting with the identifier name and should be concise (see the example after this list).
- Avoid breaking changes in minor versions; use SemVer.
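
For the doc-comment rule, the comment starts with the identifier it documents, for example (hypothetical helper):

```go
// ClampWeights normalizes a weight slice so its entries sum to 1.
// Hypothetical helper, shown only to illustrate the doc-comment style.
func ClampWeights(w []float64) []float64 {
    // ...
    return w
}
```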
## Release process

We use GoReleaser to publish GitHub Releases when a semver tag is pushed.

Steps for maintainers:
- Ensure `CHANGELOG.md` has an entry for the version and links are updated at the bottom (Unreleased compares from the latest tag).
- Ensure `poindexter.Version()` returns the new version and tests pass.
- Merge all changes to `main` and wait for CI to be green.
- Create an annotated tag and push:

```bash
VERSION=vX.Y.Z
git tag -a "$VERSION" -m "Release $VERSION"
git push origin "$VERSION"
```

- The GitHub Actions workflow `.github/workflows/release.yml` will run tests and GoReleaser to publish the Release.
- Verify the release notes and badges (the README release badge updates automatically).

Optional:
- Dry-run locally without publishing:

```bash
goreleaser release --skip=publish --clean
```

See `RELEASE.md` for more details.

Thanks for helping improve Poindexter!

LICENSE (190 lines deleted)
@@ -1,190 +0,0 @@
EUROPEAN UNION PUBLIC LICENCE v. 1.2
EUPL © the European Union 2007, 2016

This European Union Public Licence (the ‘EUPL’) applies to the Work (as defined below) which is provided under the terms of this Licence. Any use of the Work, other than as authorised under this Licence is prohibited (to the extent such use is covered by a right of the copyright holder of the Work).
The Work is provided under the terms of this Licence when the Licensor (as defined below) has placed the following notice immediately following the copyright notice for the Work:

        Licensed under the EUPL

or has expressed by any other means his willingness to license under the EUPL.

1. Definitions
In this Licence, the following terms have the following meaning:
— ‘The Licence’: this Licence.
— ‘The Original Work’: the work or software distributed or communicated by the Licensor under this Licence, available as Source Code and also as Executable Code as the case may be.
— ‘Derivative Works’: the works or software that could be created by the Licensee, based upon the Original Work or modifications thereof. This Licence does not define the extent of modification or dependence on the Original Work required in order to classify a work as a Derivative Work; this extent is determined by copyright law applicable in the country mentioned in Article 15.
— ‘The Work’: the Original Work or its Derivative Works.
— ‘The Source Code’: the human-readable form of the Work which is the most convenient for people to study and modify.
— ‘The Executable Code’: any code which has generally been compiled and which is meant to be interpreted by a computer as a program.
— ‘The Licensor’: the natural or legal person that distributes or communicates the Work under the Licence.
— ‘Contributor(s)’: any natural or legal person who modifies the Work under the Licence, or otherwise contributes to the creation of a Derivative Work.
— ‘The Licensee’ or ‘You’: any natural or legal person who makes any usage of the Work under the terms of the Licence.
— ‘Distribution’ or ‘Communication’: any act of selling, giving, lending, renting, distributing, communicating, transmitting, or otherwise making available, online or offline, copies of the Work or providing access to its essential functionalities at the disposal of any other natural or legal person.

2. Scope of the rights granted by the Licence
The Licensor hereby grants You a worldwide, royalty-free, non-exclusive, sublicensable licence to do the following, for the duration of copyright vested in the Original Work:
— use the Work in any circumstance and for all usage,
— reproduce the Work,
— modify the Work, and make Derivative Works based upon the Work,
— communicate to the public, including the right to make available or display the Work or copies thereof to the public and perform publicly, as the case may be, the Work,
— distribute the Work or copies thereof,
— lend and rent the Work or copies thereof,
— sublicense rights in the Work or copies thereof.
Those rights can be exercised on any media, supports and formats, whether now known or later invented, as far as the applicable law permits so.
In the countries where moral rights apply, the Licensor waives his right to exercise his moral right to the extent allowed by law in order to make effective the licence of the economic rights here above listed.
The Licensor grants to the Licensee royalty-free, non-exclusive usage rights to any patents held by the Licensor, to the extent necessary to make use of the rights granted on the Work under this Licence.

3. Communication of the Source Code
The Licensor may provide the Work either in its Source Code form, or as Executable Code. If the Work is provided as Executable Code, the Licensor provides in addition a machine-readable copy of the Source Code of the Work along with each copy of the Work that the Licensor distributes or indicates, in a notice following the copyright notice attached to the Work, a repository where the Source Code is easily and freely accessible for as long as the Licensor continues to distribute or communicate the Work.

4. Limitations on copyright
Nothing in this Licence is intended to deprive the Licensee of the benefits from any exception or limitation to the exclusive rights of the rights owners in the Work, of the exhaustion of those rights or of other applicable limitations thereto.

5. Obligations of the Licensee
The grant of the rights mentioned above is subject to some restrictions and obligations imposed on the Licensee. Those obligations are the following:

Attribution right: The Licensee shall keep intact all copyright, patent or trademarks notices and all notices that refer to the Licence and to the disclaimer of warranties. The Licensee must include a copy of such notices and a copy of the Licence with every copy of the Work he/she distributes or communicates. The Licensee must cause any Derivative Work to carry prominent notices stating that the Work has been modified and the date of modification.

Copyleft clause: If the Licensee distributes or communicates copies of the Original Works or Derivative Works, this Distribution or Communication will be done under the terms of this Licence or of a later version of this Licence unless the Original Work is expressly distributed only under this version of the Licence — for example by communicating ‘EUPL v. 1.2 only’. The Licensee (becoming Licensor) cannot offer or impose any additional terms or conditions on the Work or Derivative Work that alter or restrict the terms of the Licence.

Compatibility clause: If the Licensee Distributes or Communicates Derivative Works or copies thereof based upon both the Work and another work licensed under a Compatible Licence, this Distribution or Communication can be done under the terms of this Compatible Licence. For the sake of this clause, ‘Compatible Licence’ refers to the licences listed in the appendix attached to this Licence. Should the Licensee's obligations under the Compatible Licence conflict with his/her obligations under this Licence, the obligations of the Compatible Licence shall prevail.

Provision of Source Code: When distributing or communicating copies of the Work, the Licensee will provide a machine-readable copy of the Source Code or indicate a repository where this Source will be easily and freely available for as long as the Licensee continues to distribute or communicate the Work.

Legal Protection: This Licence does not grant permission to use the trade names, trademarks, service marks, or names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the copyright notice.

6. Chain of Authorship
The original Licensor warrants that the copyright in the Original Work granted hereunder is owned by him/her or licensed to him/her and that he/she has the power and authority to grant the Licence.
Each Contributor warrants that the copyright in the modifications he/she brings to the Work are owned by him/her or licensed to him/her and that he/she has the power and authority to grant the Licence.
Each time You accept the Licence, the original Licensor and subsequent Contributors grant You a licence to their contributions to the Work, under the terms of this Licence.

7. Disclaimer of Warranty
The Work is a work in progress, which is continuously improved by numerous Contributors. It is not a finished work and may therefore contain defects or ‘bugs’ inherent to this type of development.
For the above reason, the Work is provided under the Licence on an ‘as is’ basis and without warranties of any kind concerning the Work, including without limitation merchantability, fitness for a particular purpose, absence of defects or errors, accuracy, non-infringement of intellectual property rights other than copyright as stated in Article 6 of this Licence.
This disclaimer of warranty is an essential part of the Licence and a condition for the grant of any rights to the Work.

8. Disclaimer of Liability
Except in the cases of wilful misconduct or damages directly caused to natural persons, the Licensor will in no event be liable for any direct or indirect, material or moral, damages of any kind, arising out of the Licence or of the use of the Work, including without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, loss of data or any commercial damage, even if the Licensor has been advised of the possibility of such damage. However, the Licensor will be liable under statutory product liability laws as far such laws apply to the Work.

9. Additional agreements
While distributing the Work, You may choose to conclude an additional agreement, defining obligations or services consistent with this Licence. However, if accepting obligations, You may act only on your own behalf and on your sole responsibility, not on behalf of the original Licensor or any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against such Contributor by the fact You have accepted any warranty or additional liability.

10. Acceptance of the Licence
The provisions of this Licence can be accepted by clicking on an icon ‘I agree’ placed under the bottom of a window displaying the text of this Licence or by affirming consent in any other similar way, in accordance with the rules of applicable law. Clicking on that icon indicates your clear and irrevocable acceptance of this Licence and all of its terms and conditions.
Similarly, you irrevocably accept this Licence and all of its terms and conditions by exercising any rights granted to You by Article 2 of this Licence, such as the use of the Work, the creation by You of a Derivative Work or the Distribution or Communication by You of the Work or copies thereof.

11. Information to the public
In case of any Distribution or Communication of the Work by means of electronic communication by You (for example, by offering to download the Work from a remote location) the distribution channel or media (for example, a website) must at least provide to the public the information requested by the applicable law regarding the Licensor, the Licence and the way it may be accessible, concluded, stored and reproduced by the Licensee.

12. Termination of the Licence
The Licence and the rights granted hereunder will terminate automatically upon any breach by the Licensee of the terms of the Licence.
Such a termination will not terminate the licences of any person who has received the Work from the Licensee under the Licence, provided such persons remain in full compliance with the Licence.

13. Miscellaneous
Without prejudice of Article 9 above, the Licence represents the complete agreement between the Parties as to the Work.
If any provision of the Licence is invalid or unenforceable under applicable law, this will not affect the validity or enforceability of the Licence as a whole. Such provision will be construed or reformed so as necessary to make it valid and enforceable.
The European Commission may publish other linguistic versions or new versions of this Licence or updated versions of the Appendix, so far this is required and reasonable, without reducing the scope of the rights granted by the Licence. New versions of the Licence will be published with a unique version number.
All linguistic versions of this Licence, approved by the European Commission, have identical value. Parties can take advantage of the linguistic version of their choice.

14. Jurisdiction
Without prejudice to specific agreement between parties,
— any litigation resulting from the interpretation of this License, arising between the European Union institutions, bodies, offices or agencies, as a Licensor, and any Licensee, will be subject to the jurisdiction of the Court of Justice of the European Union, as laid down in article 272 of the Treaty on the Functioning of the European Union,
— any litigation arising between other parties and resulting from the interpretation of this License, will be subject to the exclusive jurisdiction of the competent court where the Licensor resides or conducts its primary business.

15. Applicable Law
Without prejudice to specific agreement between parties,
— this Licence shall be governed by the law of the European Union Member State where the Licensor has his seat, resides or has his registered office,
— this licence shall be governed by Belgian law if the Licensor has no seat, residence or registered office inside a European Union Member State.


Appendix

‘Compatible Licences’ according to Article 5 EUPL are:
— GNU General Public License (GPL) v. 2, v. 3
— GNU Affero General Public License (AGPL) v. 3
— Open Software License (OSL) v. 2.1, v. 3.0
|
||||
— Eclipse Public License (EPL) v. 1.0
|
||||
— CeCILL v. 2.0, v. 2.1
|
||||
— Mozilla Public Licence (MPL) v. 2
|
||||
— GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3
|
||||
— Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) for works other than software
|
||||
— European Union Public Licence (EUPL) v. 1.1, v. 1.2
|
||||
— Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or Strong Reciprocity (LiLiQ-R+).
|
||||
|
||||
The European Commission may update this Appendix to later versions of the above licences without producing
|
||||
a new version of the EUPL, as long as they provide the rights granted in Article 2 of this Licence and protect the
|
||||
covered Source Code from exclusive appropriation.
|
||||
All other changes or additions to this Appendix require the production of a new EUPL version.
|
||||
171  Makefile
@@ -1,171 +0,0 @@
# Maintainer Makefile for Poindexter
# Usage: `make <target>`
# Many targets are CI-parity helpers for local use.

# Tools (override with env if needed)
GO            ?= go
GOLANGCI_LINT ?= golangci-lint
GORELEASER    ?= goreleaser
MKDOCS        ?= mkdocs

# Params
FUZZTIME  ?= 10s
BENCHOUT  ?= bench.txt
COVEROUT  ?= coverage.out
COVERHTML ?= coverage.html

.PHONY: help all
all: help
help: ## List available targets
	@awk 'BEGIN {FS = ":.*##"}; /^[a-zA-Z0-9_.-]+:.*##/ {printf "\033[36m%-22s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort

.PHONY: tidy
tidy: ## Run `go mod tidy`
	$(GO) mod tidy

.PHONY: tidy-check
tidy-check: ## Run tidy and ensure go.mod/go.sum unchanged
	$(GO) mod tidy
	@git diff --exit-code -- go.mod go.sum

.PHONY: fmt
fmt: ## Format code with go fmt
	$(GO) fmt ./...

.PHONY: vet
vet: ## Run go vet
	$(GO) vet ./...

.PHONY: build
build: ## Build all packages
	$(GO) build ./...

# WebAssembly build outputs
DIST_DIR  ?= dist
WASM_OUT  ?= $(DIST_DIR)/poindexter.wasm
WASM_EXEC ?= $(shell $(GO) env GOROOT)/lib/wasm/wasm_exec.js

.PHONY: wasm-build
wasm-build: ## Build WebAssembly module to $(WASM_OUT)
	@mkdir -p $(DIST_DIR)
	GOOS=js GOARCH=wasm $(GO) build -o $(WASM_OUT) ./wasm
	@set -e; \
	if [ -n "$$WASM_EXEC" ] && [ -f "$$WASM_EXEC" ]; then \
		cp "$$WASM_EXEC" $(DIST_DIR)/wasm_exec.js; \
	else \
		CAND1="$$($(GO) env GOROOT)/lib/wasm/wasm_exec.js"; \
		CAND2="$$($(GO) env GOROOT)/libexec/lib/wasm/wasm_exec.js"; \
		if [ -f "$$CAND1" ]; then cp "$$CAND1" $(DIST_DIR)/wasm_exec.js; \
		elif [ -f "$$CAND2" ]; then cp "$$CAND2" $(DIST_DIR)/wasm_exec.js; \
		else echo "Warning: could not locate wasm_exec.js under GOROOT or WASM_EXEC; please copy it manually"; fi; \
	fi
	@echo "WASM built: $(WASM_OUT)"

.PHONY: npm-pack
npm-pack: wasm-build ## Prepare npm package folder with dist artifacts
	@mkdir -p npm/poindexter-wasm
	@rm -rf npm/poindexter-wasm/dist
	@cp -R $(DIST_DIR) npm/poindexter-wasm/dist
	@cp LICENSE npm/poindexter-wasm/LICENSE
	@cp README.md npm/poindexter-wasm/PROJECT_README.md
	@echo "npm package prepared in npm/poindexter-wasm"

.PHONY: examples
examples: ## Build all example programs under examples/
	@if [ -d examples ]; then $(GO) build ./examples/...; else echo "No examples/ directory"; fi

.PHONY: test
test: ## Run unit tests
	$(GO) test ./...

.PHONY: race
race: ## Run tests with race detector
	$(GO) test -race ./...

.PHONY: cover
cover: ## Run tests with race + coverage and summarize
	$(GO) test -race -coverprofile=$(COVEROUT) -covermode=atomic ./...
	@$(GO) tool cover -func=$(COVEROUT) | tail -n 1

.PHONY: coverfunc
coverfunc: ## Print per-function coverage from $(COVEROUT)
	@$(GO) tool cover -func=$(COVEROUT)

.PHONY: cover-kdtree
cover-kdtree: ## Print coverage details for kdtree.go only
	@$(GO) tool cover -func=$(COVEROUT) | grep 'kdtree.go' || true

.PHONY: coverhtml
coverhtml: cover ## Generate HTML coverage report at $(COVERHTML)
	@$(GO) tool cover -html=$(COVEROUT) -o $(COVERHTML)
	@echo "Wrote $(COVERHTML)"

.PHONY: fuzz
fuzz: ## Run Go fuzz tests for $(FUZZTIME)
	@set -e; \
	PKGS="$$($(GO) list ./...)"; \
	for pkg in $$PKGS; do \
		FUZZES="$$($(GO) test -list '^Fuzz' $$pkg | grep '^Fuzz' || true)"; \
		if [ -z "$$FUZZES" ]; then \
			echo "==> Skipping $$pkg (no fuzz targets)"; \
			continue; \
		fi; \
		for fz in $$FUZZES; do \
			echo "==> Fuzzing $$pkg :: $$fz for $(FUZZTIME)"; \
			$(GO) test -run=NONE -fuzz=^$${fz}$$ -fuzztime=$(FUZZTIME) $$pkg; \
		done; \
	done

.PHONY: bench
# Benchmark configuration variables
BENCHPKG     ?= ./...
BENCHFILTER  ?= .
BENCHTAGS    ?=
BENCHMEMFLAG ?= -benchmem

bench: ## Run benchmarks (configurable: BENCHPKG, BENCHFILTER, BENCHTAGS, BENCHOUT)
	$(GO) test $(if $(BENCHTAGS),-tags=$(BENCHTAGS),) -bench $(BENCHFILTER) $(BENCHMEMFLAG) -run=^$$ $(BENCHPKG) | tee $(BENCHOUT)

.PHONY: bench-linear
bench-linear: ## Run linear-backend benchmarks and write bench-linear.txt
	$(MAKE) bench BENCHTAGS= BENCHOUT=bench-linear.txt

.PHONY: bench-gonum
bench-gonum: ## Run gonum-backend benchmarks (includes 100k benches) and write bench-gonum.txt
	$(MAKE) bench BENCHTAGS=gonum BENCHOUT=bench-gonum.txt

.PHONY: bench-list
bench-list: ## List available benchmark names for BENCHPKG (use with BENCHPKG=./pkg)
	$(GO) test $(if $(BENCHTAGS),-tags=$(BENCHTAGS),) -run=^$$ -bench ^$$ -list '^Benchmark' $(BENCHPKG)

.PHONY: lint
lint: ## Run golangci-lint (requires it installed)
	$(GOLANGCI_LINT) run

.PHONY: vuln
vuln: ## Run govulncheck (requires it installed)
	govulncheck ./...

.PHONY: ci
ci: tidy-check build vet cover examples bench lint vuln wasm-build ## CI-parity local run (includes wasm-build)
	@echo "CI-like checks completed"

.PHONY: release
release: ## Run GoReleaser to publish a tagged release (requires tag and permissions)
	$(GORELEASER) release --clean --config .goreleaser.yaml

.PHONY: snapshot
snapshot: ## Run GoReleaser in snapshot mode (no publish)
	$(GORELEASER) release --skip=publish --clean --config .goreleaser.yaml

.PHONY: docs-serve
docs-serve: ## Serve MkDocs locally (requires mkdocs-material)
	$(MKDOCS) serve -a 127.0.0.1:8000

.PHONY: docs-build
docs-build: ## Build MkDocs site into site/
	$(MKDOCS) build

.PHONY: clean
clean: ## Remove generated files and directories
	rm -rf $(DIST_DIR) $(COVEROUT) $(COVERHTML) $(BENCHOUT)
238  README.md
@@ -1,238 +0,0 @@
# Poindexter

[Go Reference](https://pkg.go.dev/github.com/Snider/Poindexter) ·
[CI](https://github.com/Snider/Poindexter/actions) ·
[Go Report Card](https://goreportcard.com/report/github.com/Snider/Poindexter) ·
[govulncheck](https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck) ·
[Coverage](https://codecov.io/gh/Snider/Poindexter) ·
[Releases](https://github.com/Snider/Poindexter/releases)

A Go library package providing utility functions, including sorting algorithms with custom comparators.

## Features

- 🔢 **Sorting Utilities**: Sort integers, strings, and floats in ascending or descending order
- 🎯 **Custom Sorting**: Sort any type with custom comparison functions or key extractors
- 🔍 **Binary Search**: Fast search on sorted data
- 🧭 **KDTree (NN Search)**: Build a KDTree over points with generic payloads; nearest, k-NN, and radius queries with Euclidean, Manhattan, Chebyshev, and Cosine metrics (see the sketch after the Quick Start for k-NN and radius calls)
- 📦 **Generic Functions**: Type-safe operations using Go generics
- ✅ **Well-Tested**: Comprehensive test coverage
- 📖 **Documentation**: Full documentation available at GitHub Pages

## Installation

```bash
go get github.com/Snider/Poindexter
```

## Quick Start

```go
package main

import (
	"fmt"

	poindexter "github.com/Snider/Poindexter"
)

func main() {
	// Basic sorting
	numbers := []int{3, 1, 4, 1, 5, 9}
	poindexter.SortInts(numbers)
	fmt.Println(numbers) // [1 1 3 4 5 9]

	// Custom sorting with key function
	type Product struct {
		Name  string
		Price float64
	}

	products := []Product{{"Apple", 1.50}, {"Banana", 0.75}, {"Cherry", 3.00}}
	poindexter.SortByKey(products, func(p Product) float64 { return p.Price })

	// KDTree quick demo
	pts := []poindexter.KDPoint[string]{
		{ID: "A", Coords: []float64{0, 0}, Value: "alpha"},
		{ID: "B", Coords: []float64{1, 0}, Value: "bravo"},
		{ID: "C", Coords: []float64{0, 1}, Value: "charlie"},
	}
	tree, _ := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
	nearest, dist, _ := tree.Nearest([]float64{0.9, 0.1})
	fmt.Println(nearest.ID, nearest.Value, dist) // B bravo ~0.141...
}
```
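The feature list also mentions k-NN and radius queries. The fragment below (continuing from the demo above) is purely illustrative: `KNearest` and `WithinRadius` are assumed method names, not confirmed API, so consult the [Go Reference](https://pkg.go.dev/github.com/Snider/Poindexter) for the real signatures.

```go
// Hypothetical sketch only: KNearest and WithinRadius are assumed names for
// the k-NN and radius queries listed under Features; the real methods may
// differ. The tree and query point are the ones from the demo above.
neighbours, _ := tree.KNearest([]float64{0.9, 0.1}, 2)    // two closest points (assumed signature)
for _, p := range neighbours {
	fmt.Println(p.ID, p.Value)
}

inRange, _ := tree.WithinRadius([]float64{0.9, 0.1}, 0.5) // all points within distance 0.5 (assumed signature)
fmt.Println(len(inRange))
```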
## Documentation

Full documentation is available at [https://snider.github.io/Poindexter/](https://snider.github.io/Poindexter/)

Explore runnable examples in the repository:

- examples/dht_ping_1d
- examples/kdtree_2d_ping_hop
- examples/kdtree_3d_ping_hop_geo
- examples/kdtree_4d_ping_hop_geo_score
- examples/dht_helpers (convenience wrappers for common DHT schemas)
- examples/wasm-browser (browser demo using the ESM loader)
- examples/wasm-browser-ts (TypeScript + Vite local demo)

### KDTree performance and notes

- Dual backend support: Linear (always available) and an optimized KD backend enabled when building with `-tags=gonum`. Linear is the default; with the `gonum` tag, the optimized backend becomes the default.
- Complexity: the Linear backend is O(n) per query. The optimized KD backend is typically sub-linear on prunable datasets with dims ≤ ~8, especially as N grows (≥10k–100k).
- Insert is O(1) amortized; delete by ID is O(1) via swap-delete; order is not preserved.
- Concurrency: the KDTree type is not safe for concurrent mutation. Protect it with a mutex or share immutable snapshots for read-mostly workloads (a minimal locking sketch follows this list).
- See the multi-dimensional examples (ping/hops/geo/score) in the docs and `examples/`.
- Performance guide: see docs/Performance for benchmark guidance and tips: [docs/perf.md](docs/perf.md) • Hosted: https://snider.github.io/Poindexter/perf/
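A minimal sketch of the locking approach mentioned above, reusing only the `tree` and `Nearest` call from the Quick Start. The wrapper itself is illustrative, not part of the library; add `"sync"` to your imports.

```go
// Illustrative sketch: guard a tree built as in the Quick Start with a
// sync.RWMutex so many goroutines can query it concurrently. Only the
// locking is new; the Nearest call is the one shown in the Quick Start.
var mu sync.RWMutex

lookup := func(q []float64) (string, float64) {
	mu.RLock()
	defer mu.RUnlock()
	nearest, dist, _ := tree.Nearest(q)
	return nearest.ID, dist
}

// Readers can run in parallel; take mu.Lock() around any mutation instead.
id, dist := lookup([]float64{0.9, 0.1})
fmt.Println(id, dist)
```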
### Backend selection

- Default backend is Linear. If you build with `-tags=gonum`, the default becomes the optimized KD backend.
- You can override per tree at construction:

```go
// Force Linear (always available)
kdt1, _ := poindexter.NewKDTree(pts, poindexter.WithBackend(poindexter.BackendLinear))

// Force Gonum (requires build tag)
kdt2, _ := poindexter.NewKDTree(pts, poindexter.WithBackend(poindexter.BackendGonum))
```

- Supported metrics in the optimized backend: Euclidean (L2), Manhattan (L1), Chebyshev (L∞).
- Cosine and Weighted-Cosine currently run on the Linear backend.
- See the Performance guide for measured comparisons and when to choose which backend.

#### Choosing a metric (quick tips)

- Euclidean (L2): smooth trade-offs across axes; a solid default for blended preferences.
- Manhattan (L1): emphasizes per-axis absolute differences; good when each unit of ping/hop matters equally.
- Chebyshev (L∞): dominated by the worst axis; useful for strict thresholds (e.g., reject a high hop count regardless of ping).
- Cosine: angle-based vector similarity; pair it with normalized/weighted features when direction matters more than magnitude (see the sketch after this list).
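A rough sketch of wiring these choices up at construction time. Only `EuclideanDistance` is confirmed by the Quick Start; `ManhattanDistance` and `ChebyshevDistance` below are assumed names by analogy, so check the package reference for the exact identifiers.

```go
// Hedged sketch: pick the metric via WithMetric when building the tree.
// EuclideanDistance appears in the Quick Start; the other two type names are
// assumptions by analogy and may differ in the actual API.
blended, _ := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))   // smooth trade-offs
perAxis, _ := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.ManhattanDistance{}))   // each unit counts equally
worstAxis, _ := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.ChebyshevDistance{})) // dominated by the worst axis
_, _, _ = blended, perAxis, worstAxis
```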
See the multi-dimensional KDTree docs for end-to-end examples and weighting/normalization helpers: [Multi-Dimensional KDTree (DHT)](docs/kdtree-multidimensional.md).

## Maintainer Makefile

The repository includes a maintainer-friendly `Makefile` that mirrors CI tasks and speeds up local workflows.

- help — list available targets
- tidy / tidy-check — run `go mod tidy`, optionally verify no diffs
- fmt — format code (`go fmt ./...`)
- vet — `go vet ./...`
- build — `go build ./...`
- examples — build all programs under `examples/` (if present)
- test — run unit tests
- race — run tests with the race detector
- cover — run tests with race + coverage (writes `coverage.out` and prints summary)
- coverhtml — render HTML coverage report to `coverage.html`
- coverfunc — print per-function coverage (from `coverage.out`)
- cover-kdtree — print coverage details filtered to `kdtree.go`
- fuzz — run Go fuzzing for a configurable time (default 10s), matching CI
- bench — run benchmarks with `-benchmem` (writes `bench.txt`)
- lint — run `golangci-lint` (if installed)
- vuln — run `govulncheck` (if installed)
- ci — CI-parity aggregate: tidy-check, build, vet, cover, examples, bench, lint, vuln, wasm-build
- release — run GoReleaser with the canonical `.goreleaser.yaml` (for tagged releases)
- snapshot — GoReleaser snapshot (no publish)
- docs-serve — serve MkDocs locally on 127.0.0.1:8000
- docs-build — build MkDocs site into `site/`

Quick usage:

- See all targets:

```bash
make help
```

- Fast local cycle:

```bash
make fmt
make vet
make test
```

- CI-parity run (what GitHub Actions does, locally):

```bash
make ci
```

- Coverage summary:

```bash
make cover
```

- Generate HTML coverage report (writes coverage.html):

```bash
make coverhtml
```

- Fuzz for 10 seconds (default):

```bash
make fuzz
```

- Fuzz with a custom time (e.g., 30s):

```bash
make fuzz FUZZTIME=30s
```

- Run benchmarks (writes bench.txt):

```bash
make bench
```

- Build examples (if any under ./examples):

```bash
make examples
```

- Serve docs locally (requires mkdocs-material):

```bash
make docs-serve
```

Configurable variables:

- `FUZZTIME` (default `10s`) — e.g. `make fuzz FUZZTIME=30s`
- `BENCHOUT` (default `bench.txt`), `COVEROUT` (default `coverage.out`), `COVERHTML` (default `coverage.html`)
- Tool commands are overridable via env: `GO`, `GOLANGCI_LINT`, `GORELEASER`, `MKDOCS`

Requirements for optional targets:

- `golangci-lint` for `make lint`
- `golang.org/x/vuln/cmd/govulncheck` for `make vuln`
- `goreleaser` for `make release` / `make snapshot`
- `mkdocs` + `mkdocs-material` for `make docs-serve` / `make docs-build`

See the full Makefile at the repo root for authoritative target definitions.
## License

This project is licensed under the European Union Public Licence v1.2 (EUPL-1.2). See [LICENSE](LICENSE) for details.

## Contributing

Contributions are welcome! Please feel free to submit a Pull Request.

## Coverage

- CI produces coverage summaries as artifacts on every push/PR:
  - Default job: `coverage-summary.md` (from `coverage.out`)
  - Gonum-tag job: `coverage-summary-gonum.md` (from `coverage-gonum.out`)
- Locally, you can generate and inspect coverage with the Makefile:

```bash
make cover        # runs tests with race + coverage and prints the total
make coverfunc    # prints per-function coverage
make cover-kdtree # filters coverage to kdtree.go
make coverhtml    # writes coverage.html for visual inspection
```

Note: CI also uploads raw coverage profiles as artifacts (`coverage.out`, `coverage-gonum.out`).
20  SECURITY.md
@@ -1,20 +0,0 @@
# Security Policy

## Supported Versions

We support the latest minor release series. Please use the most recent tagged version.

## Reporting a Vulnerability

If you believe you have found a security vulnerability in Poindexter:

- Please DO NOT open a public GitHub issue.
- Email the maintainer listed on the repository profile with:
  - A description of the issue and its impact
  - Steps to reproduce (a minimal proof-of-concept if possible)
  - Affected versions/commit hashes
- We will acknowledge receipt within 5 business days and work with you on a fix and coordinated disclosure.

## Dependencies

We run `govulncheck` in CI. If you see alerts or advisories that affect Poindexter, please include links or CVE identifiers in your report.
2218  api/index.html  Normal file
File diff suppressed because it is too large

BIN  assets/images/favicon.png  Normal file
Binary file not shown. (After: 1.8 KiB)

16  assets/javascripts/bundle.79ae519e.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

7  assets/javascripts/bundle.79ae519e.min.js.map  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.ar.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long
18  assets/javascripts/lunr/min/lunr.da.min.js  vendored  Normal file
@@ -0,0 +1,18 @@
/*!
 * Lunr languages, `Danish` language
 * https://github.com/MihaiValentin/lunr-languages
 *
 * Copyright 2014, Mihai Valentin
 * http://www.mozilla.org/MPL/
 */
/*!
 * based on
 * Snowball JavaScript Library v0.3
 * http://code.google.com/p/urim/
 * http://snowball.tartarus.org/
 *
 * Copyright 2010, Oleg Mazko
 * http://www.mozilla.org/MPL/
 */
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.da=function(){this.pipeline.reset(),this.pipeline.add(e.da.trimmer,e.da.stopWordFilter,e.da.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.da.stemmer))},e.da.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.da.trimmer=e.trimmerSupport.generateTrimmer(e.da.wordCharacters),e.Pipeline.registerFunction(e.da.trimmer,"trimmer-da"),e.da.stemmer=function(){var r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){function e(){var e,r=f.cursor+3;if(d=f.limit,0<=r&&r<=f.limit){for(a=r;;){if(e=f.cursor,f.in_grouping(w,97,248)){f.cursor=e;break}if(f.cursor=e,e>=f.limit)return;f.cursor++}for(;!f.out_grouping(w,97,248);){if(f.cursor>=f.limit)return;f.cursor++}d=f.cursor,d<a&&(d=a)}}function n(){var e,r;if(f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(c,32),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del();break;case 2:f.in_grouping_b(p,97,229)&&f.slice_del()}}function t(){var e,r=f.limit-f.cursor;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.find_among_b(l,4)?(f.bra=f.cursor,f.limit_backward=e,f.cursor=f.limit-r,f.cursor>f.limit_backward&&(f.cursor--,f.bra=f.cursor,f.slice_del())):f.limit_backward=e)}function s(){var e,r,i,n=f.limit-f.cursor;if(f.ket=f.cursor,f.eq_s_b(2,"st")&&(f.bra=f.cursor,f.eq_s_b(2,"ig")&&f.slice_del()),f.cursor=f.limit-n,f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(m,5),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del(),i=f.limit-f.cursor,t(),f.cursor=f.limit-i;break;case 2:f.slice_from("løs")}}function o(){var e;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.out_grouping_b(w,97,248)?(f.bra=f.cursor,u=f.slice_to(u),f.limit_backward=e,f.eq_v_b(u)&&f.slice_del()):f.limit_backward=e)}var a,d,u,c=[new r("hed",-1,1),new r("ethed",0,1),new r("ered",-1,1),new r("e",-1,1),new r("erede",3,1),new r("ende",3,1),new r("erende",5,1),new r("ene",3,1),new r("erne",3,1),new r("ere",3,1),new r("en",-1,1),new r("heden",10,1),new r("eren",10,1),new r("er",-1,1),new r("heder",13,1),new r("erer",13,1),new r("s",-1,2),new r("heds",16,1),new r("es",16,1),new r("endes",18,1),new r("erendes",19,1),new r("enes",18,1),new r("ernes",18,1),new r("eres",18,1),new r("ens",16,1),new r("hedens",24,1),new r("erens",24,1),new r("ers",16,1),new r("ets",16,1),new r("erets",28,1),new r("et",-1,1),new r("eret",30,1)],l=[new r("gd",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("elig",1,1),new r("els",-1,1),new r("løst",-1,2)],w=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],p=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16],f=new i;this.setCurrent=function(e){f.setCurrent(e)},this.getCurrent=function(){return f.getCurrent()},this.stem=function(){var r=f.cursor;return e(),f.limit_backward=r,f.cursor=f.limit,n(),f.cursor=f.limit,t(),f.cursor=f.limit,s(),f.cursor=f.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return 
n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.da.stemmer,"stemmer-da"),e.da.stopWordFilter=e.generateStopWordFilter("ad af alle alt anden at blev blive bliver da de dem den denne der deres det dette dig din disse dog du efter eller en end er et for fra ham han hans har havde have hende hendes her hos hun hvad hvis hvor i ikke ind jeg jer jo kunne man mange med meget men mig min mine mit mod ned noget nogle nu når og også om op os over på selv sig sin sine sit skal skulle som sådan thi til ud under var vi vil ville vor være været".split(" ")),e.Pipeline.registerFunction(e.da.stopWordFilter,"stopWordFilter-da")}});
18  assets/javascripts/lunr/min/lunr.de.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

18  assets/javascripts/lunr/min/lunr.du.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.el.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

18  assets/javascripts/lunr/min/lunr.es.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

18  assets/javascripts/lunr/min/lunr.fi.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

18  assets/javascripts/lunr/min/lunr.fr.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.he.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.hi.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hi=function(){this.pipeline.reset(),this.pipeline.add(e.hi.trimmer,e.hi.stopWordFilter,e.hi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hi.stemmer))},e.hi.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿa-zA-Za-zA-Z0-90-9",e.hi.trimmer=e.trimmerSupport.generateTrimmer(e.hi.wordCharacters),e.Pipeline.registerFunction(e.hi.trimmer,"trimmer-hi"),e.hi.stopWordFilter=e.generateStopWordFilter("अत अपना अपनी अपने अभी अंदर आदि आप इत्यादि इन इनका इन्हीं इन्हें इन्हों इस इसका इसकी इसके इसमें इसी इसे उन उनका उनकी उनके उनको उन्हीं उन्हें उन्हों उस उसके उसी उसे एक एवं एस ऐसे और कई कर करता करते करना करने करें कहते कहा का काफ़ी कि कितना किन्हें किन्हों किया किर किस किसी किसे की कुछ कुल के को कोई कौन कौनसा गया घर जब जहाँ जा जितना जिन जिन्हें जिन्हों जिस जिसे जीधर जैसा जैसे जो तक तब तरह तिन तिन्हें तिन्हों तिस तिसे तो था थी थे दबारा दिया दुसरा दूसरे दो द्वारा न नके नहीं ना निहायत नीचे ने पर पहले पूरा पे फिर बनी बही बहुत बाद बाला बिलकुल भी भीतर मगर मानो मे में यदि यह यहाँ यही या यिह ये रखें रहा रहे ऱ्वासा लिए लिये लेकिन व वग़ैरह वर्ग वह वहाँ वहीं वाले वुह वे वो सकता सकते सबसे सभी साथ साबुत साभ सारा से सो संग ही हुआ हुई हुए है हैं हो होता होती होते होना होने".split(" ")),e.hi.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.hi.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var t=i.toString().toLowerCase().replace(/^\s+/,"");return r.cut(t).split("|")},e.Pipeline.registerFunction(e.hi.stemmer,"stemmer-hi"),e.Pipeline.registerFunction(e.hi.stopWordFilter,"stopWordFilter-hi")}});
18  assets/javascripts/lunr/min/lunr.hu.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.hy.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hy=function(){this.pipeline.reset(),this.pipeline.add(e.hy.trimmer,e.hy.stopWordFilter)},e.hy.wordCharacters="[A-Za-z-֏ff-ﭏ]",e.hy.trimmer=e.trimmerSupport.generateTrimmer(e.hy.wordCharacters),e.Pipeline.registerFunction(e.hy.trimmer,"trimmer-hy"),e.hy.stopWordFilter=e.generateStopWordFilter("դու և եք էիր էիք հետո նաև նրանք որը վրա է որ պիտի են այս մեջ ն իր ու ի այդ որոնք այն կամ էր մի ես համար այլ իսկ էին ենք հետ ին թ էինք մենք նրա նա դուք եմ էի ըստ որպես ում".split(" ")),e.Pipeline.registerFunction(e.hy.stopWordFilter,"stopWordFilter-hy"),e.hy.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}(),e.Pipeline.registerFunction(e.hy.stemmer,"stemmer-hy")}});
18  assets/javascripts/lunr/min/lunr.it.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.ja.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.ja=function(){this.pipeline.reset(),this.pipeline.add(e.ja.trimmer,e.ja.stopWordFilter,e.ja.stemmer),r?this.tokenizer=e.ja.tokenizer:(e.tokenizer&&(e.tokenizer=e.ja.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.ja.tokenizer))};var t=new e.TinySegmenter;e.ja.tokenizer=function(i){var n,o,s,p,a,u,m,l,c,f;if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t.toLowerCase()):t.toLowerCase()});for(o=i.toString().toLowerCase().replace(/^\s+/,""),n=o.length-1;n>=0;n--)if(/\S/.test(o.charAt(n))){o=o.substring(0,n+1);break}for(a=[],s=o.length,c=0,l=0;c<=s;c++)if(u=o.charAt(c),m=c-l,u.match(/\s/)||c==s){if(m>0)for(p=t.segment(o.slice(l,c)).filter(function(e){return!!e}),f=l,n=0;n<p.length;n++)r?a.push(new e.Token(p[n],{position:[f,p[n].length],index:a.length})):a.push(p[n]),f+=p[n].length;l=c+1}return a},e.ja.stemmer=function(){return function(e){return e}}(),e.Pipeline.registerFunction(e.ja.stemmer,"stemmer-ja"),e.ja.wordCharacters="一二三四五六七八九十百千万億兆一-龠々〆ヵヶぁ-んァ-ヴーア-ン゙a-zA-Za-zA-Z0-90-9",e.ja.trimmer=e.trimmerSupport.generateTrimmer(e.ja.wordCharacters),e.Pipeline.registerFunction(e.ja.trimmer,"trimmer-ja"),e.ja.stopWordFilter=e.generateStopWordFilter("これ それ あれ この その あの ここ そこ あそこ こちら どこ だれ なに なん 何 私 貴方 貴方方 我々 私達 あの人 あのかた 彼女 彼 です あります おります います は が の に を で え から まで より も どの と し それで しかし".split(" ")),e.Pipeline.registerFunction(e.ja.stopWordFilter,"stopWordFilter-ja"),e.jp=e.ja,e.Pipeline.registerFunction(e.jp.stemmer,"stemmer-jp"),e.Pipeline.registerFunction(e.jp.trimmer,"trimmer-jp"),e.Pipeline.registerFunction(e.jp.stopWordFilter,"stopWordFilter-jp")}});
1  assets/javascripts/lunr/min/lunr.jp.min.js  vendored  Normal file
@@ -0,0 +1 @@
module.exports=require("./lunr.ja");
1  assets/javascripts/lunr/min/lunr.kn.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.kn=function(){this.pipeline.reset(),this.pipeline.add(e.kn.trimmer,e.kn.stopWordFilter,e.kn.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.kn.stemmer))},e.kn.wordCharacters="ಀ-಄ಅ-ಔಕ-ಹಾ-ೌ಼-ಽೕ-ೖೝ-ೞೠ-ೡೢ-ೣ೦-೯ೱ-ೳ",e.kn.trimmer=e.trimmerSupport.generateTrimmer(e.kn.wordCharacters),e.Pipeline.registerFunction(e.kn.trimmer,"trimmer-kn"),e.kn.stopWordFilter=e.generateStopWordFilter("ಮತ್ತು ಈ ಒಂದು ರಲ್ಲಿ ಹಾಗೂ ಎಂದು ಅಥವಾ ಇದು ರ ಅವರು ಎಂಬ ಮೇಲೆ ಅವರ ತನ್ನ ಆದರೆ ತಮ್ಮ ನಂತರ ಮೂಲಕ ಹೆಚ್ಚು ನ ಆ ಕೆಲವು ಅನೇಕ ಎರಡು ಹಾಗು ಪ್ರಮುಖ ಇದನ್ನು ಇದರ ಸುಮಾರು ಅದರ ಅದು ಮೊದಲ ಬಗ್ಗೆ ನಲ್ಲಿ ರಂದು ಇತರ ಅತ್ಯಂತ ಹೆಚ್ಚಿನ ಸಹ ಸಾಮಾನ್ಯವಾಗಿ ನೇ ಹಲವಾರು ಹೊಸ ದಿ ಕಡಿಮೆ ಯಾವುದೇ ಹೊಂದಿದೆ ದೊಡ್ಡ ಅನ್ನು ಇವರು ಪ್ರಕಾರ ಇದೆ ಮಾತ್ರ ಕೂಡ ಇಲ್ಲಿ ಎಲ್ಲಾ ವಿವಿಧ ಅದನ್ನು ಹಲವು ರಿಂದ ಕೇವಲ ದ ದಕ್ಷಿಣ ಗೆ ಅವನ ಅತಿ ನೆಯ ಬಹಳ ಕೆಲಸ ಎಲ್ಲ ಪ್ರತಿ ಇತ್ಯಾದಿ ಇವು ಬೇರೆ ಹೀಗೆ ನಡುವೆ ಇದಕ್ಕೆ ಎಸ್ ಇವರ ಮೊದಲು ಶ್ರೀ ಮಾಡುವ ಇದರಲ್ಲಿ ರೀತಿಯ ಮಾಡಿದ ಕಾಲ ಅಲ್ಲಿ ಮಾಡಲು ಅದೇ ಈಗ ಅವು ಗಳು ಎ ಎಂಬುದು ಅವನು ಅಂದರೆ ಅವರಿಗೆ ಇರುವ ವಿಶೇಷ ಮುಂದೆ ಅವುಗಳ ಮುಂತಾದ ಮೂಲ ಬಿ ಮೀ ಒಂದೇ ಇನ್ನೂ ಹೆಚ್ಚಾಗಿ ಮಾಡಿ ಅವರನ್ನು ಇದೇ ಯ ರೀತಿಯಲ್ಲಿ ಜೊತೆ ಅದರಲ್ಲಿ ಮಾಡಿದರು ನಡೆದ ಆಗ ಮತ್ತೆ ಪೂರ್ವ ಆತ ಬಂದ ಯಾವ ಒಟ್ಟು ಇತರೆ ಹಿಂದೆ ಪ್ರಮಾಣದ ಗಳನ್ನು ಕುರಿತು ಯು ಆದ್ದರಿಂದ ಅಲ್ಲದೆ ನಗರದ ಮೇಲಿನ ಏಕೆಂದರೆ ರಷ್ಟು ಎಂಬುದನ್ನು ಬಾರಿ ಎಂದರೆ ಹಿಂದಿನ ಆದರೂ ಆದ ಸಂಬಂಧಿಸಿದ ಮತ್ತೊಂದು ಸಿ ಆತನ ".split(" ")),e.kn.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.kn.tokenizer=function(t){if(!arguments.length||null==t||void 0==t)return[];if(Array.isArray(t))return t.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var n=t.toString().toLowerCase().replace(/^\s+/,"");return r.cut(n).split("|")},e.Pipeline.registerFunction(e.kn.stemmer,"stemmer-kn"),e.Pipeline.registerFunction(e.kn.stopWordFilter,"stopWordFilter-kn")}});
1  assets/javascripts/lunr/min/lunr.ko.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.multi.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){e.multiLanguage=function(){for(var t=Array.prototype.slice.call(arguments),i=t.join("-"),r="",n=[],s=[],p=0;p<t.length;++p)"en"==t[p]?(r+="\\w",n.unshift(e.stopWordFilter),n.push(e.stemmer),s.push(e.stemmer)):(r+=e[t[p]].wordCharacters,e[t[p]].stopWordFilter&&n.unshift(e[t[p]].stopWordFilter),e[t[p]].stemmer&&(n.push(e[t[p]].stemmer),s.push(e[t[p]].stemmer)));var o=e.trimmerSupport.generateTrimmer(r);return e.Pipeline.registerFunction(o,"lunr-multi-trimmer-"+i),n.unshift(o),function(){this.pipeline.reset(),this.pipeline.add.apply(this.pipeline,n),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add.apply(this.searchPipeline,s))}}}});
18  assets/javascripts/lunr/min/lunr.nl.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

18  assets/javascripts/lunr/min/lunr.no.min.js  vendored  Normal file
@@ -0,0 +1,18 @@
/*!
 * Lunr languages, `Norwegian` language
 * https://github.com/MihaiValentin/lunr-languages
 *
 * Copyright 2014, Mihai Valentin
 * http://www.mozilla.org/MPL/
 */
/*!
 * based on
 * Snowball JavaScript Library v0.3
 * http://code.google.com/p/urim/
 * http://snowball.tartarus.org/
 *
 * Copyright 2010, Oleg Mazko
 * http://www.mozilla.org/MPL/
 */
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.no=function(){this.pipeline.reset(),this.pipeline.add(e.no.trimmer,e.no.stopWordFilter,e.no.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.no.stemmer))},e.no.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.no.trimmer=e.trimmerSupport.generateTrimmer(e.no.wordCharacters),e.Pipeline.registerFunction(e.no.trimmer,"trimmer-no"),e.no.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,r=w.cursor+3;if(a=w.limit,0<=r||r<=w.limit){for(s=r;;){if(e=w.cursor,w.in_grouping(d,97,248)){w.cursor=e;break}if(e>=w.limit)return;w.cursor=e+1}for(;!w.out_grouping(d,97,248);){if(w.cursor>=w.limit)return;w.cursor++}a=w.cursor,a<s&&(a=s)}}function i(){var e,r,n;if(w.cursor>=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(m,29),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:n=w.limit-w.cursor,w.in_grouping_b(c,98,122)?w.slice_del():(w.cursor=w.limit-n,w.eq_s_b(1,"k")&&w.out_grouping_b(d,97,248)&&w.slice_del());break;case 3:w.slice_from("er")}}function t(){var e,r=w.limit-w.cursor;w.cursor>=a&&(e=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,w.find_among_b(u,2)?(w.bra=w.cursor,w.limit_backward=e,w.cursor=w.limit-r,w.cursor>w.limit_backward&&(w.cursor--,w.bra=w.cursor,w.slice_del())):w.limit_backward=e)}function o(){var e,r;w.cursor>=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(l,11),e?(w.bra=w.cursor,w.limit_backward=r,1==e&&w.slice_del()):w.limit_backward=r)}var s,a,m=[new r("a",-1,1),new r("e",-1,1),new r("ede",1,1),new r("ande",1,1),new r("ende",1,1),new r("ane",1,1),new r("ene",1,1),new r("hetene",6,1),new r("erte",1,3),new r("en",-1,1),new r("heten",9,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",12,1),new r("s",-1,2),new r("as",14,1),new r("es",14,1),new r("edes",16,1),new r("endes",16,1),new r("enes",16,1),new r("hetenes",19,1),new r("ens",14,1),new r("hetens",21,1),new r("ers",14,1),new r("ets",14,1),new r("et",-1,1),new r("het",25,1),new r("ert",-1,3),new r("ast",-1,1)],u=[new r("dt",-1,-1),new r("vt",-1,-1)],l=[new r("leg",-1,1),new r("eleg",0,1),new r("ig",-1,1),new r("eig",2,1),new r("lig",2,1),new r("elig",4,1),new r("els",-1,1),new r("lov",-1,1),new r("elov",7,1),new r("slov",7,1),new r("hetslov",9,1)],d=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],c=[119,125,149,1],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,i(),w.cursor=w.limit,t(),w.cursor=w.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.no.stemmer,"stemmer-no"),e.no.stopWordFilter=e.generateStopWordFilter("alle at av bare begge ble blei bli blir blitt både båe da de deg dei deim deira deires dem den denne der dere deres det dette di din disse ditt du dykk dykkar då eg ein eit eitt eller 
elles en enn er et ett etter for fordi fra før ha hadde han hans har hennar henne hennes her hjå ho hoe honom hoss hossen hun hva hvem hver hvilke hvilken hvis hvor hvordan hvorfor i ikke ikkje ikkje ingen ingi inkje inn inni ja jeg kan kom korleis korso kun kunne kva kvar kvarhelst kven kvi kvifor man mange me med medan meg meget mellom men mi min mine mitt mot mykje ned no noe noen noka noko nokon nokor nokre nå når og også om opp oss over på samme seg selv si si sia sidan siden sin sine sitt sjøl skal skulle slik so som som somme somt så sånn til um upp ut uten var vart varte ved vere verte vi vil ville vore vors vort vår være være vært å".split(" ")),e.Pipeline.registerFunction(e.no.stopWordFilter,"stopWordFilter-no")}});
18  assets/javascripts/lunr/min/lunr.pt.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

18  assets/javascripts/lunr/min/lunr.ro.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

18  assets/javascripts/lunr/min/lunr.ru.min.js  vendored  Normal file
File diff suppressed because one or more lines are too long

1  assets/javascripts/lunr/min/lunr.sa.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.sa=function(){this.pipeline.reset(),this.pipeline.add(e.sa.trimmer,e.sa.stopWordFilter,e.sa.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sa.stemmer))},e.sa.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿ꣠-꣱ꣲ-ꣷ꣸-ꣻ꣼-ꣽꣾ-ꣿᆰ0-ᆰ9",e.sa.trimmer=e.trimmerSupport.generateTrimmer(e.sa.wordCharacters),e.Pipeline.registerFunction(e.sa.trimmer,"trimmer-sa"),e.sa.stopWordFilter=e.generateStopWordFilter('तथा अयम् एकम् इत्यस्मिन् तथा तत् वा अयम् इत्यस्य ते आहूत उपरि तेषाम् किन्तु तेषाम् तदा इत्यनेन अधिकः इत्यस्य तत् केचन बहवः द्वि तथा महत्वपूर्णः अयम् अस्य विषये अयं अस्ति तत् प्रथमः विषये इत्युपरि इत्युपरि इतर अधिकतमः अधिकः अपि सामान्यतया ठ इतरेतर नूतनम् द न्यूनम् कश्चित् वा विशालः द सः अस्ति तदनुसारम् तत्र अस्ति केवलम् अपि अत्र सर्वे विविधाः तत् बहवः यतः इदानीम् द दक्षिण इत्यस्मै तस्य उपरि नथ अतीव कार्यम् सर्वे एकैकम् इत्यादि। एते सन्ति उत इत्थम् मध्ये एतदर्थं . स कस्य प्रथमः श्री. करोति अस्मिन् प्रकारः निर्मिता कालः तत्र कर्तुं समान अधुना ते सन्ति स एकः अस्ति सः अर्थात् तेषां कृते . स्थितम् विशेषः अग्रिम तेषाम् समान स्रोतः ख म समान इदानीमपि अधिकतया करोतु ते समान इत्यस्य वीथी सह यस्मिन् कृतवान् धृतः तदा पुनः पूर्वं सः आगतः किम् कुल इतर पुरा मात्रा स विषये उ अतएव अपि नगरस्य उपरि यतः प्रतिशतं कतरः कालः साधनानि भूत तथापि जात सम्बन्धि अन्यत् ग अतः अस्माकं स्वकीयाः अस्माकं इदानीं अन्तः इत्यादयः भवन्तः इत्यादयः एते एताः तस्य अस्य इदम् एते तेषां तेषां तेषां तान् तेषां तेषां तेषां समानः सः एकः च तादृशाः बहवः अन्ये च वदन्ति यत् कियत् कस्मै कस्मै यस्मै यस्मै यस्मै यस्मै न अतिनीचः किन्तु प्रथमं सम्पूर्णतया ततः चिरकालानन्तरं पुस्तकं सम्पूर्णतया अन्तः किन्तु अत्र वा इह इव श्रद्धाय अवशिष्यते परन्तु अन्ये वर्गाः सन्ति ते सन्ति शक्नुवन्ति सर्वे मिलित्वा सर्वे एकत्र"'.split(" ")),e.sa.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.sa.tokenizer=function(t){if(!arguments.length||null==t||void 0==t)return[];if(Array.isArray(t))return t.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var i=t.toString().toLowerCase().replace(/^\s+/,"");return r.cut(i).split("|")},e.Pipeline.registerFunction(e.sa.stemmer,"stemmer-sa"),e.Pipeline.registerFunction(e.sa.stopWordFilter,"stopWordFilter-sa")}});
1  assets/javascripts/lunr/min/lunr.stemmer.support.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(r,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(r.lunr)}(this,function(){return function(r){r.stemmerSupport={Among:function(r,t,i,s){if(this.toCharArray=function(r){for(var t=r.length,i=new Array(t),s=0;s<t;s++)i[s]=r.charCodeAt(s);return i},!r&&""!=r||!t&&0!=t||!i)throw"Bad Among initialisation: s:"+r+", substring_i: "+t+", result: "+i;this.s_size=r.length,this.s=this.toCharArray(r),this.substring_i=t,this.result=i,this.method=s},SnowballProgram:function(){var r;return{bra:0,ket:0,limit:0,cursor:0,limit_backward:0,setCurrent:function(t){r=t,this.cursor=0,this.limit=t.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},getCurrent:function(){var t=r;return r=null,t},in_grouping:function(t,i,s){if(this.cursor<this.limit){var e=r.charCodeAt(this.cursor);if(e<=s&&e>=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor++,!0}return!1},in_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e<=s&&e>=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor--,!0}return!1},out_grouping:function(t,i,s){if(this.cursor<this.limit){var e=r.charCodeAt(this.cursor);if(e>s||e<i)return this.cursor++,!0;if(e-=i,!(t[e>>3]&1<<(7&e)))return this.cursor++,!0}return!1},out_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e>s||e<i)return this.cursor--,!0;if(e-=i,!(t[e>>3]&1<<(7&e)))return this.cursor--,!0}return!1},eq_s:function(t,i){if(this.limit-this.cursor<t)return!1;for(var s=0;s<t;s++)if(r.charCodeAt(this.cursor+s)!=i.charCodeAt(s))return!1;return this.cursor+=t,!0},eq_s_b:function(t,i){if(this.cursor-this.limit_backward<t)return!1;for(var s=0;s<t;s++)if(r.charCodeAt(this.cursor-t+s)!=i.charCodeAt(s))return!1;return this.cursor-=t,!0},find_among:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o<h?o:h,_=t[a],m=l;m<_.s_size;m++){if(n+l==u){f=-1;break}if(f=r.charCodeAt(n+l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n+_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n+_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},find_among_b:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit_backward,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o<h?o:h,_=t[a],m=_.s_size-1-l;m>=0;m--){if(n-l==u){f=-1;break}if(f=r.charCodeAt(n-1-l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n-_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n-_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},replace_s:function(t,i,s){var e=s.length-(i-t),n=r.substring(0,t),u=r.substring(i);return r=n+s+u,this.limit+=e,this.cursor>=i?this.cursor+=e:this.cursor>t&&(this.cursor=t),e},slice_check:function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>r.length)throw"faulty slice operation"},slice_from:function(r){this.slice_check(),this.replace_s(this.bra,this.ket,r)},slice_del:function(){this.slice_from("")},insert:function(r,t,i){var s=this.replace_s(r,t,i);r<=this.bra&&(this.bra+=s),r<=this.ket&&(this.ket+=s)},slice_to:function(){return this.slice_check(),r.substring(this.bra,this.ket)},eq_v_b:function(r){return this.eq_s_b(r.length,r)}}}},r.trimmerSupport={generateTrimmer:function(r){var t=new RegExp("^[^"+r+"]+"),i=new RegExp("[^"+r+"]+$");return function(r){return"function"==typeof 
r.update?r.update(function(r){return r.replace(t,"").replace(i,"")}):r.replace(t,"").replace(i,"")}}}}});
18  assets/javascripts/lunr/min/lunr.sv.min.js  vendored  Normal file
@@ -0,0 +1,18 @@
/*!
 * Lunr languages, `Swedish` language
 * https://github.com/MihaiValentin/lunr-languages
 *
 * Copyright 2014, Mihai Valentin
 * http://www.mozilla.org/MPL/
 */
/*!
 * based on
 * Snowball JavaScript Library v0.3
 * http://code.google.com/p/urim/
 * http://snowball.tartarus.org/
 *
 * Copyright 2010, Oleg Mazko
 * http://www.mozilla.org/MPL/
 */
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.sv=function(){this.pipeline.reset(),this.pipeline.add(e.sv.trimmer,e.sv.stopWordFilter,e.sv.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sv.stemmer))},e.sv.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.sv.trimmer=e.trimmerSupport.generateTrimmer(e.sv.wordCharacters),e.Pipeline.registerFunction(e.sv.trimmer,"trimmer-sv"),e.sv.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,t=new function(){function e(){var e,r=w.cursor+3;if(o=w.limit,0<=r||r<=w.limit){for(a=r;;){if(e=w.cursor,w.in_grouping(l,97,246)){w.cursor=e;break}if(w.cursor=e,w.cursor>=w.limit)return;w.cursor++}for(;!w.out_grouping(l,97,246);){if(w.cursor>=w.limit)return;w.cursor++}o=w.cursor,o<a&&(o=a)}}function t(){var e,r=w.limit_backward;if(w.cursor>=o&&(w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(u,37),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.in_grouping_b(d,98,121)&&w.slice_del()}}function i(){var e=w.limit_backward;w.cursor>=o&&(w.limit_backward=o,w.cursor=w.limit,w.find_among_b(c,7)&&(w.cursor=w.limit,w.ket=w.cursor,w.cursor>w.limit_backward&&(w.bra=--w.cursor,w.slice_del())),w.limit_backward=e)}function s(){var e,r;if(w.cursor>=o){if(r=w.limit_backward,w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(m,5))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.slice_from("lös");break;case 3:w.slice_from("full")}w.limit_backward=r}}var a,o,u=[new r("a",-1,1),new r("arna",0,1),new r("erna",0,1),new r("heterna",2,1),new r("orna",0,1),new r("ad",-1,1),new r("e",-1,1),new r("ade",6,1),new r("ande",6,1),new r("arne",6,1),new r("are",6,1),new r("aste",6,1),new r("en",-1,1),new r("anden",12,1),new r("aren",12,1),new r("heten",12,1),new r("ern",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",18,1),new r("or",-1,1),new r("s",-1,2),new r("as",21,1),new r("arnas",22,1),new r("ernas",22,1),new r("ornas",22,1),new r("es",21,1),new r("ades",26,1),new r("andes",26,1),new r("ens",21,1),new r("arens",29,1),new r("hetens",29,1),new r("erns",21,1),new r("at",-1,1),new r("andet",-1,1),new r("het",-1,1),new r("ast",-1,1)],c=[new r("dd",-1,-1),new r("gd",-1,-1),new r("nn",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1),new r("tt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("els",-1,1),new r("fullt",-1,3),new r("löst",-1,2)],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32],d=[119,127,149],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,t(),w.cursor=w.limit,i(),w.cursor=w.limit,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return t.setCurrent(e),t.stem(),t.getCurrent()}):(t.setCurrent(e),t.stem(),t.getCurrent())}}(),e.Pipeline.registerFunction(e.sv.stemmer,"stemmer-sv"),e.sv.stopWordFilter=e.generateStopWordFilter("alla allt att av blev bli blir blivit de dem den denna deras dess dessa det detta dig din dina ditt du där då efter ej 
eller en er era ert ett från för ha hade han hans har henne hennes hon honom hur här i icke ingen inom inte jag ju kan kunde man med mellan men mig min mina mitt mot mycket ni nu när någon något några och om oss på samma sedan sig sin sina sitta själv skulle som så sådan sådana sådant till under upp ut utan vad var vara varför varit varje vars vart vem vi vid vilka vilkas vilken vilket vår våra vårt än är åt över".split(" ")),e.Pipeline.registerFunction(e.sv.stopWordFilter,"stopWordFilter-sv")}});
1  assets/javascripts/lunr/min/lunr.ta.min.js  vendored  Normal file
@@ -0,0 +1 @@
!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ta=function(){this.pipeline.reset(),this.pipeline.add(e.ta.trimmer,e.ta.stopWordFilter,e.ta.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ta.stemmer))},e.ta.wordCharacters="-உஊ-ஏஐ-ஙச-ட-னப-யர-ஹ-ிீ-ொ-ௐ---௩௪-௯௰-௹௺-a-zA-Za-zA-Z0-90-9",e.ta.trimmer=e.trimmerSupport.generateTrimmer(e.ta.wordCharacters),e.Pipeline.registerFunction(e.ta.trimmer,"trimmer-ta"),e.ta.stopWordFilter=e.generateStopWordFilter("அங்கு அங்கே அது அதை அந்த அவர் அவர்கள் அவள் அவன் அவை ஆக ஆகவே ஆகையால் ஆதலால் ஆதலினால் ஆனாலும் ஆனால் இங்கு இங்கே இது இதை இந்த இப்படி இவர் இவர்கள் இவள் இவன் இவை இவ்வளவு உனக்கு உனது உன் உன்னால் எங்கு எங்கே எது எதை எந்த எப்படி எவர் எவர்கள் எவள் எவன் எவை எவ்வளவு எனக்கு எனது எனவே என் என்ன என்னால் ஏது ஏன் தனது தன்னால் தானே தான் நாங்கள் நாம் நான் நீ நீங்கள்".split(" ")),e.ta.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.ta.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.ta.stemmer,"stemmer-ta"),e.Pipeline.registerFunction(e.ta.stopWordFilter,"stopWordFilter-ta")}});
1 assets/javascripts/lunr/min/lunr.te.min.js (vendored, Normal file)
@@ -0,0 +1 @@
!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.te=function(){this.pipeline.reset(),this.pipeline.add(e.te.trimmer,e.te.stopWordFilter,e.te.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.te.stemmer))},e.te.wordCharacters="ఀ-ఄఅ-ఔక-హా-ౌౕ-ౖౘ-ౚౠ-ౡౢ-ౣ౦-౯౸-౿఼ఽ్ౝ౷",e.te.trimmer=e.trimmerSupport.generateTrimmer(e.te.wordCharacters),e.Pipeline.registerFunction(e.te.trimmer,"trimmer-te"),e.te.stopWordFilter=e.generateStopWordFilter("అందరూ అందుబాటులో అడగండి అడగడం అడ్డంగా అనుగుణంగా అనుమతించు అనుమతిస్తుంది అయితే ఇప్పటికే ఉన్నారు ఎక్కడైనా ఎప్పుడు ఎవరైనా ఎవరో ఏ ఏదైనా ఏమైనప్పటికి ఒక ఒకరు కనిపిస్తాయి కాదు కూడా గా గురించి చుట్టూ చేయగలిగింది తగిన తర్వాత దాదాపు దూరంగా నిజంగా పై ప్రకారం ప్రక్కన మధ్య మరియు మరొక మళ్ళీ మాత్రమే మెచ్చుకో వద్ద వెంట వేరుగా వ్యతిరేకంగా సంబంధం".split(" ")),e.te.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.te.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.te.stemmer,"stemmer-te"),e.Pipeline.registerFunction(e.te.stopWordFilter,"stopWordFilter-te")}});
1 assets/javascripts/lunr/min/lunr.th.min.js (vendored, Normal file)
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.th=function(){this.pipeline.reset(),this.pipeline.add(e.th.trimmer),r?this.tokenizer=e.th.tokenizer:(e.tokenizer&&(e.tokenizer=e.th.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.th.tokenizer))},e.th.wordCharacters="[-]",e.th.trimmer=e.trimmerSupport.generateTrimmer(e.th.wordCharacters),e.Pipeline.registerFunction(e.th.trimmer,"trimmer-th");var t=e.wordcut;t.init(),e.th.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t):t});var n=i.toString().replace(/^\s+/,"");return t.cut(n).split("|")}}});
18 assets/javascripts/lunr/min/lunr.tr.min.js (vendored, Normal file)
File diff suppressed because one or more lines are too long
1 assets/javascripts/lunr/min/lunr.vi.min.js (vendored, Normal file)
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.vi=function(){this.pipeline.reset(),this.pipeline.add(e.vi.stopWordFilter,e.vi.trimmer)},e.vi.wordCharacters="[A-Za-ẓ̀͐́͑̉̃̓ÂâÊêÔôĂ-ăĐ-đƠ-ơƯ-ư]",e.vi.trimmer=e.trimmerSupport.generateTrimmer(e.vi.wordCharacters),e.Pipeline.registerFunction(e.vi.trimmer,"trimmer-vi"),e.vi.stopWordFilter=e.generateStopWordFilter("là cái nhưng mà".split(" "))}});
1 assets/javascripts/lunr/min/lunr.zh.min.js (vendored, Normal file)
@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r(require("@node-rs/jieba")):r()(e.lunr)}(this,function(e){return function(r,t){if(void 0===r)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var i="2"==r.version[0];r.zh=function(){this.pipeline.reset(),this.pipeline.add(r.zh.trimmer,r.zh.stopWordFilter,r.zh.stemmer),i?this.tokenizer=r.zh.tokenizer:(r.tokenizer&&(r.tokenizer=r.zh.tokenizer),this.tokenizerFn&&(this.tokenizerFn=r.zh.tokenizer))},r.zh.tokenizer=function(n){if(!arguments.length||null==n||void 0==n)return[];if(Array.isArray(n))return n.map(function(e){return i?new r.Token(e.toLowerCase()):e.toLowerCase()});t&&e.load(t);var o=n.toString().trim().toLowerCase(),s=[];e.cut(o,!0).forEach(function(e){s=s.concat(e.split(" "))}),s=s.filter(function(e){return!!e});var u=0;return s.map(function(e,t){if(i){var n=o.indexOf(e,u),s={};return s.position=[n,e.length],s.index=t,u=n,new r.Token(e,s)}return e})},r.zh.wordCharacters="\\w一-龥",r.zh.trimmer=r.trimmerSupport.generateTrimmer(r.zh.wordCharacters),r.Pipeline.registerFunction(r.zh.trimmer,"trimmer-zh"),r.zh.stemmer=function(){return function(e){return e}}(),r.Pipeline.registerFunction(r.zh.stemmer,"stemmer-zh"),r.zh.stopWordFilter=r.generateStopWordFilter("的 一 不 在 人 有 是 为 為 以 于 於 上 他 而 后 後 之 来 來 及 了 因 下 可 到 由 这 這 与 與 也 此 但 并 並 个 個 其 已 无 無 小 我 们 們 起 最 再 今 去 好 只 又 或 很 亦 某 把 那 你 乃 它 吧 被 比 别 趁 当 當 从 從 得 打 凡 儿 兒 尔 爾 该 該 各 给 給 跟 和 何 还 還 即 几 幾 既 看 据 據 距 靠 啦 另 么 麽 每 嘛 拿 哪 您 凭 憑 且 却 卻 让 讓 仍 啥 如 若 使 谁 誰 虽 雖 随 隨 同 所 她 哇 嗡 往 些 向 沿 哟 喲 用 咱 则 則 怎 曾 至 致 着 著 诸 諸 自".split(" ")),r.Pipeline.registerFunction(r.zh.stopWordFilter,"stopWordFilter-zh")}});
206 assets/javascripts/lunr/tinyseg.js (Normal file)
@@ -0,0 +1,206 @@
/**
 * export the module via AMD, CommonJS or as a browser global
 * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js
 */
;(function (root, factory) {
  if (typeof define === 'function' && define.amd) {
    // AMD. Register as an anonymous module.
    define(factory)
  } else if (typeof exports === 'object') {
    /**
     * Node. Does not work with strict CommonJS, but
     * only CommonJS-like environments that support module.exports,
     * like Node.
     */
    module.exports = factory()
  } else {
    // Browser globals (root is window)
    factory()(root.lunr);
  }
}(this, function () {
  /**
   * Just return a value to define the module export.
   * This example returns an object, but the module
   * can return a function as the exported value.
   */

  return function(lunr) {
    // TinySegmenter 0.1 -- Super compact Japanese tokenizer in Javascript
    // (c) 2008 Taku Kudo <taku@chasen.org>
    // TinySegmenter is freely distributable under the terms of a new BSD licence.
    // For details, see http://chasen.org/~taku/software/TinySegmenter/LICENCE.txt

    function TinySegmenter() {
      var patterns = {
        "[一二三四五六七八九十百千万億兆]":"M",
        "[一-龠々〆ヵヶ]":"H",
        "[ぁ-ん]":"I",
        "[ァ-ヴーア-ン゙ー]":"K",
        "[a-zA-Za-zA-Z]":"A",
        "[0-90-9]":"N"
      }
      this.chartype_ = [];
      for (var i in patterns) {
        var regexp = new RegExp(i);
        this.chartype_.push([regexp, patterns[i]]);
      }

this.BIAS__ = -332
|
||||
this.BC1__ = {"HH":6,"II":2461,"KH":406,"OH":-1378};
|
||||
this.BC2__ = {"AA":-3267,"AI":2744,"AN":-878,"HH":-4070,"HM":-1711,"HN":4012,"HO":3761,"IA":1327,"IH":-1184,"II":-1332,"IK":1721,"IO":5492,"KI":3831,"KK":-8741,"MH":-3132,"MK":3334,"OO":-2920};
|
||||
this.BC3__ = {"HH":996,"HI":626,"HK":-721,"HN":-1307,"HO":-836,"IH":-301,"KK":2762,"MK":1079,"MM":4034,"OA":-1652,"OH":266};
|
||||
this.BP1__ = {"BB":295,"OB":304,"OO":-125,"UB":352};
|
||||
this.BP2__ = {"BO":60,"OO":-1762};
|
||||
this.BQ1__ = {"BHH":1150,"BHM":1521,"BII":-1158,"BIM":886,"BMH":1208,"BNH":449,"BOH":-91,"BOO":-2597,"OHI":451,"OIH":-296,"OKA":1851,"OKH":-1020,"OKK":904,"OOO":2965};
|
||||
this.BQ2__ = {"BHH":118,"BHI":-1159,"BHM":466,"BIH":-919,"BKK":-1720,"BKO":864,"OHH":-1139,"OHM":-181,"OIH":153,"UHI":-1146};
|
||||
this.BQ3__ = {"BHH":-792,"BHI":2664,"BII":-299,"BKI":419,"BMH":937,"BMM":8335,"BNN":998,"BOH":775,"OHH":2174,"OHM":439,"OII":280,"OKH":1798,"OKI":-793,"OKO":-2242,"OMH":-2402,"OOO":11699};
|
||||
this.BQ4__ = {"BHH":-3895,"BIH":3761,"BII":-4654,"BIK":1348,"BKK":-1806,"BMI":-3385,"BOO":-12396,"OAH":926,"OHH":266,"OHK":-2036,"ONN":-973};
|
||||
this.BW1__ = {",と":660,",同":727,"B1あ":1404,"B1同":542,"、と":660,"、同":727,"」と":1682,"あっ":1505,"いう":1743,"いっ":-2055,"いる":672,"うし":-4817,"うん":665,"から":3472,"がら":600,"こう":-790,"こと":2083,"こん":-1262,"さら":-4143,"さん":4573,"した":2641,"して":1104,"すで":-3399,"そこ":1977,"それ":-871,"たち":1122,"ため":601,"った":3463,"つい":-802,"てい":805,"てき":1249,"でき":1127,"です":3445,"では":844,"とい":-4915,"とみ":1922,"どこ":3887,"ない":5713,"なっ":3015,"など":7379,"なん":-1113,"にし":2468,"には":1498,"にも":1671,"に対":-912,"の一":-501,"の中":741,"ませ":2448,"まで":1711,"まま":2600,"まる":-2155,"やむ":-1947,"よっ":-2565,"れた":2369,"れで":-913,"をし":1860,"を見":731,"亡く":-1886,"京都":2558,"取り":-2784,"大き":-2604,"大阪":1497,"平方":-2314,"引き":-1336,"日本":-195,"本当":-2423,"毎日":-2113,"目指":-724,"B1あ":1404,"B1同":542,"」と":1682};
|
||||
this.BW2__ = {"..":-11822,"11":-669,"――":-5730,"−−":-13175,"いう":-1609,"うか":2490,"かし":-1350,"かも":-602,"から":-7194,"かれ":4612,"がい":853,"がら":-3198,"きた":1941,"くな":-1597,"こと":-8392,"この":-4193,"させ":4533,"され":13168,"さん":-3977,"しい":-1819,"しか":-545,"した":5078,"して":972,"しな":939,"その":-3744,"たい":-1253,"たた":-662,"ただ":-3857,"たち":-786,"たと":1224,"たは":-939,"った":4589,"って":1647,"っと":-2094,"てい":6144,"てき":3640,"てく":2551,"ては":-3110,"ても":-3065,"でい":2666,"でき":-1528,"でし":-3828,"です":-4761,"でも":-4203,"とい":1890,"とこ":-1746,"とと":-2279,"との":720,"とみ":5168,"とも":-3941,"ない":-2488,"なが":-1313,"など":-6509,"なの":2614,"なん":3099,"にお":-1615,"にし":2748,"にな":2454,"によ":-7236,"に対":-14943,"に従":-4688,"に関":-11388,"のか":2093,"ので":-7059,"のに":-6041,"のの":-6125,"はい":1073,"はが":-1033,"はず":-2532,"ばれ":1813,"まし":-1316,"まで":-6621,"まれ":5409,"めて":-3153,"もい":2230,"もの":-10713,"らか":-944,"らし":-1611,"らに":-1897,"りし":651,"りま":1620,"れた":4270,"れて":849,"れば":4114,"ろう":6067,"われ":7901,"を通":-11877,"んだ":728,"んな":-4115,"一人":602,"一方":-1375,"一日":970,"一部":-1051,"上が":-4479,"会社":-1116,"出て":2163,"分の":-7758,"同党":970,"同日":-913,"大阪":-2471,"委員":-1250,"少な":-1050,"年度":-8669,"年間":-1626,"府県":-2363,"手権":-1982,"新聞":-4066,"日新":-722,"日本":-7068,"日米":3372,"曜日":-601,"朝鮮":-2355,"本人":-2697,"東京":-1543,"然と":-1384,"社会":-1276,"立て":-990,"第に":-1612,"米国":-4268,"11":-669};
|
||||
this.BW3__ = {"あた":-2194,"あり":719,"ある":3846,"い.":-1185,"い。":-1185,"いい":5308,"いえ":2079,"いく":3029,"いた":2056,"いっ":1883,"いる":5600,"いわ":1527,"うち":1117,"うと":4798,"えと":1454,"か.":2857,"か。":2857,"かけ":-743,"かっ":-4098,"かに":-669,"から":6520,"かり":-2670,"が,":1816,"が、":1816,"がき":-4855,"がけ":-1127,"がっ":-913,"がら":-4977,"がり":-2064,"きた":1645,"けど":1374,"こと":7397,"この":1542,"ころ":-2757,"さい":-714,"さを":976,"し,":1557,"し、":1557,"しい":-3714,"した":3562,"して":1449,"しな":2608,"しま":1200,"す.":-1310,"す。":-1310,"する":6521,"ず,":3426,"ず、":3426,"ずに":841,"そう":428,"た.":8875,"た。":8875,"たい":-594,"たの":812,"たり":-1183,"たる":-853,"だ.":4098,"だ。":4098,"だっ":1004,"った":-4748,"って":300,"てい":6240,"てお":855,"ても":302,"です":1437,"でに":-1482,"では":2295,"とう":-1387,"とし":2266,"との":541,"とも":-3543,"どう":4664,"ない":1796,"なく":-903,"など":2135,"に,":-1021,"に、":-1021,"にし":1771,"にな":1906,"には":2644,"の,":-724,"の、":-724,"の子":-1000,"は,":1337,"は、":1337,"べき":2181,"まし":1113,"ます":6943,"まっ":-1549,"まで":6154,"まれ":-793,"らし":1479,"られ":6820,"るる":3818,"れ,":854,"れ、":854,"れた":1850,"れて":1375,"れば":-3246,"れる":1091,"われ":-605,"んだ":606,"んで":798,"カ月":990,"会議":860,"入り":1232,"大会":2217,"始め":1681,"市":965,"新聞":-5055,"日,":974,"日、":974,"社会":2024,"カ月":990};
|
||||
this.TC1__ = {"AAA":1093,"HHH":1029,"HHM":580,"HII":998,"HOH":-390,"HOM":-331,"IHI":1169,"IOH":-142,"IOI":-1015,"IOM":467,"MMH":187,"OOI":-1832};
|
||||
this.TC2__ = {"HHO":2088,"HII":-1023,"HMM":-1154,"IHI":-1965,"KKH":703,"OII":-2649};
|
||||
this.TC3__ = {"AAA":-294,"HHH":346,"HHI":-341,"HII":-1088,"HIK":731,"HOH":-1486,"IHH":128,"IHI":-3041,"IHO":-1935,"IIH":-825,"IIM":-1035,"IOI":-542,"KHH":-1216,"KKA":491,"KKH":-1217,"KOK":-1009,"MHH":-2694,"MHM":-457,"MHO":123,"MMH":-471,"NNH":-1689,"NNO":662,"OHO":-3393};
|
||||
this.TC4__ = {"HHH":-203,"HHI":1344,"HHK":365,"HHM":-122,"HHN":182,"HHO":669,"HIH":804,"HII":679,"HOH":446,"IHH":695,"IHO":-2324,"IIH":321,"III":1497,"IIO":656,"IOO":54,"KAK":4845,"KKA":3386,"KKK":3065,"MHH":-405,"MHI":201,"MMH":-241,"MMM":661,"MOM":841};
|
||||
this.TQ1__ = {"BHHH":-227,"BHHI":316,"BHIH":-132,"BIHH":60,"BIII":1595,"BNHH":-744,"BOHH":225,"BOOO":-908,"OAKK":482,"OHHH":281,"OHIH":249,"OIHI":200,"OIIH":-68};
|
||||
this.TQ2__ = {"BIHH":-1401,"BIII":-1033,"BKAK":-543,"BOOO":-5591};
|
||||
this.TQ3__ = {"BHHH":478,"BHHM":-1073,"BHIH":222,"BHII":-504,"BIIH":-116,"BIII":-105,"BMHI":-863,"BMHM":-464,"BOMH":620,"OHHH":346,"OHHI":1729,"OHII":997,"OHMH":481,"OIHH":623,"OIIH":1344,"OKAK":2792,"OKHH":587,"OKKA":679,"OOHH":110,"OOII":-685};
|
||||
this.TQ4__ = {"BHHH":-721,"BHHM":-3604,"BHII":-966,"BIIH":-607,"BIII":-2181,"OAAA":-2763,"OAKK":180,"OHHH":-294,"OHHI":2446,"OHHO":480,"OHIH":-1573,"OIHH":1935,"OIHI":-493,"OIIH":626,"OIII":-4007,"OKAK":-8156};
|
||||
this.TW1__ = {"につい":-4681,"東京都":2026};
|
||||
this.TW2__ = {"ある程":-2049,"いった":-1256,"ころが":-2434,"しょう":3873,"その後":-4430,"だって":-1049,"ていた":1833,"として":-4657,"ともに":-4517,"もので":1882,"一気に":-792,"初めて":-1512,"同時に":-8097,"大きな":-1255,"対して":-2721,"社会党":-3216};
|
||||
this.TW3__ = {"いただ":-1734,"してい":1314,"として":-4314,"につい":-5483,"にとっ":-5989,"に当た":-6247,"ので,":-727,"ので、":-727,"のもの":-600,"れから":-3752,"十二月":-2287};
|
||||
this.TW4__ = {"いう.":8576,"いう。":8576,"からな":-2348,"してい":2958,"たが,":1516,"たが、":1516,"ている":1538,"という":1349,"ました":5543,"ません":1097,"ようと":-4258,"よると":5865};
|
||||
this.UC1__ = {"A":484,"K":93,"M":645,"O":-505};
|
||||
this.UC2__ = {"A":819,"H":1059,"I":409,"M":3987,"N":5775,"O":646};
|
||||
this.UC3__ = {"A":-1370,"I":2311};
|
||||
this.UC4__ = {"A":-2643,"H":1809,"I":-1032,"K":-3450,"M":3565,"N":3876,"O":6646};
|
||||
this.UC5__ = {"H":313,"I":-1238,"K":-799,"M":539,"O":-831};
|
||||
this.UC6__ = {"H":-506,"I":-253,"K":87,"M":247,"O":-387};
|
||||
this.UP1__ = {"O":-214};
|
||||
this.UP2__ = {"B":69,"O":935};
|
||||
this.UP3__ = {"B":189};
|
||||
this.UQ1__ = {"BH":21,"BI":-12,"BK":-99,"BN":142,"BO":-56,"OH":-95,"OI":477,"OK":410,"OO":-2422};
|
||||
this.UQ2__ = {"BH":216,"BI":113,"OK":1759};
|
||||
this.UQ3__ = {"BA":-479,"BH":42,"BI":1913,"BK":-7198,"BM":3160,"BN":6427,"BO":14761,"OI":-827,"ON":-3212};
|
||||
this.UW1__ = {",":156,"、":156,"「":-463,"あ":-941,"う":-127,"が":-553,"き":121,"こ":505,"で":-201,"と":-547,"ど":-123,"に":-789,"の":-185,"は":-847,"も":-466,"や":-470,"よ":182,"ら":-292,"り":208,"れ":169,"を":-446,"ん":-137,"・":-135,"主":-402,"京":-268,"区":-912,"午":871,"国":-460,"大":561,"委":729,"市":-411,"日":-141,"理":361,"生":-408,"県":-386,"都":-718,"「":-463,"・":-135};
|
||||
this.UW2__ = {",":-829,"、":-829,"〇":892,"「":-645,"」":3145,"あ":-538,"い":505,"う":134,"お":-502,"か":1454,"が":-856,"く":-412,"こ":1141,"さ":878,"ざ":540,"し":1529,"す":-675,"せ":300,"そ":-1011,"た":188,"だ":1837,"つ":-949,"て":-291,"で":-268,"と":-981,"ど":1273,"な":1063,"に":-1764,"の":130,"は":-409,"ひ":-1273,"べ":1261,"ま":600,"も":-1263,"や":-402,"よ":1639,"り":-579,"る":-694,"れ":571,"を":-2516,"ん":2095,"ア":-587,"カ":306,"キ":568,"ッ":831,"三":-758,"不":-2150,"世":-302,"中":-968,"主":-861,"事":492,"人":-123,"会":978,"保":362,"入":548,"初":-3025,"副":-1566,"北":-3414,"区":-422,"大":-1769,"天":-865,"太":-483,"子":-1519,"学":760,"実":1023,"小":-2009,"市":-813,"年":-1060,"強":1067,"手":-1519,"揺":-1033,"政":1522,"文":-1355,"新":-1682,"日":-1815,"明":-1462,"最":-630,"朝":-1843,"本":-1650,"東":-931,"果":-665,"次":-2378,"民":-180,"気":-1740,"理":752,"発":529,"目":-1584,"相":-242,"県":-1165,"立":-763,"第":810,"米":509,"自":-1353,"行":838,"西":-744,"見":-3874,"調":1010,"議":1198,"込":3041,"開":1758,"間":-1257,"「":-645,"」":3145,"ッ":831,"ア":-587,"カ":306,"キ":568};
|
||||
this.UW3__ = {",":4889,"1":-800,"−":-1723,"、":4889,"々":-2311,"〇":5827,"」":2670,"〓":-3573,"あ":-2696,"い":1006,"う":2342,"え":1983,"お":-4864,"か":-1163,"が":3271,"く":1004,"け":388,"げ":401,"こ":-3552,"ご":-3116,"さ":-1058,"し":-395,"す":584,"せ":3685,"そ":-5228,"た":842,"ち":-521,"っ":-1444,"つ":-1081,"て":6167,"で":2318,"と":1691,"ど":-899,"な":-2788,"に":2745,"の":4056,"は":4555,"ひ":-2171,"ふ":-1798,"へ":1199,"ほ":-5516,"ま":-4384,"み":-120,"め":1205,"も":2323,"や":-788,"よ":-202,"ら":727,"り":649,"る":5905,"れ":2773,"わ":-1207,"を":6620,"ん":-518,"ア":551,"グ":1319,"ス":874,"ッ":-1350,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278,"・":-3794,"一":-1619,"下":-1759,"世":-2087,"両":3815,"中":653,"主":-758,"予":-1193,"二":974,"人":2742,"今":792,"他":1889,"以":-1368,"低":811,"何":4265,"作":-361,"保":-2439,"元":4858,"党":3593,"全":1574,"公":-3030,"六":755,"共":-1880,"円":5807,"再":3095,"分":457,"初":2475,"別":1129,"前":2286,"副":4437,"力":365,"動":-949,"務":-1872,"化":1327,"北":-1038,"区":4646,"千":-2309,"午":-783,"協":-1006,"口":483,"右":1233,"各":3588,"合":-241,"同":3906,"和":-837,"員":4513,"国":642,"型":1389,"場":1219,"外":-241,"妻":2016,"学":-1356,"安":-423,"実":-1008,"家":1078,"小":-513,"少":-3102,"州":1155,"市":3197,"平":-1804,"年":2416,"広":-1030,"府":1605,"度":1452,"建":-2352,"当":-3885,"得":1905,"思":-1291,"性":1822,"戸":-488,"指":-3973,"政":-2013,"教":-1479,"数":3222,"文":-1489,"新":1764,"日":2099,"旧":5792,"昨":-661,"時":-1248,"曜":-951,"最":-937,"月":4125,"期":360,"李":3094,"村":364,"東":-805,"核":5156,"森":2438,"業":484,"氏":2613,"民":-1694,"決":-1073,"法":1868,"海":-495,"無":979,"物":461,"特":-3850,"生":-273,"用":914,"町":1215,"的":7313,"直":-1835,"省":792,"県":6293,"知":-1528,"私":4231,"税":401,"立":-960,"第":1201,"米":7767,"系":3066,"約":3663,"級":1384,"統":-4229,"総":1163,"線":1255,"者":6457,"能":725,"自":-2869,"英":785,"見":1044,"調":-562,"財":-733,"費":1777,"車":1835,"軍":1375,"込":-1504,"通":-1136,"選":-681,"郎":1026,"郡":4404,"部":1200,"金":2163,"長":421,"開":-1432,"間":1302,"関":-1282,"雨":2009,"電":-1045,"非":2066,"駅":1620,"1":-800,"」":2670,"・":-3794,"ッ":-1350,"ア":551,"グ":1319,"ス":874,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278};
|
||||
this.UW4__ = {",":3930,".":3508,"―":-4841,"、":3930,"。":3508,"〇":4999,"「":1895,"」":3798,"〓":-5156,"あ":4752,"い":-3435,"う":-640,"え":-2514,"お":2405,"か":530,"が":6006,"き":-4482,"ぎ":-3821,"く":-3788,"け":-4376,"げ":-4734,"こ":2255,"ご":1979,"さ":2864,"し":-843,"じ":-2506,"す":-731,"ず":1251,"せ":181,"そ":4091,"た":5034,"だ":5408,"ち":-3654,"っ":-5882,"つ":-1659,"て":3994,"で":7410,"と":4547,"な":5433,"に":6499,"ぬ":1853,"ね":1413,"の":7396,"は":8578,"ば":1940,"ひ":4249,"び":-4134,"ふ":1345,"へ":6665,"べ":-744,"ほ":1464,"ま":1051,"み":-2082,"む":-882,"め":-5046,"も":4169,"ゃ":-2666,"や":2795,"ょ":-1544,"よ":3351,"ら":-2922,"り":-9726,"る":-14896,"れ":-2613,"ろ":-4570,"わ":-1783,"を":13150,"ん":-2352,"カ":2145,"コ":1789,"セ":1287,"ッ":-724,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637,"・":-4371,"ー":-11870,"一":-2069,"中":2210,"予":782,"事":-190,"井":-1768,"人":1036,"以":544,"会":950,"体":-1286,"作":530,"側":4292,"先":601,"党":-2006,"共":-1212,"内":584,"円":788,"初":1347,"前":1623,"副":3879,"力":-302,"動":-740,"務":-2715,"化":776,"区":4517,"協":1013,"参":1555,"合":-1834,"和":-681,"員":-910,"器":-851,"回":1500,"国":-619,"園":-1200,"地":866,"場":-1410,"塁":-2094,"士":-1413,"多":1067,"大":571,"子":-4802,"学":-1397,"定":-1057,"寺":-809,"小":1910,"屋":-1328,"山":-1500,"島":-2056,"川":-2667,"市":2771,"年":374,"庁":-4556,"後":456,"性":553,"感":916,"所":-1566,"支":856,"改":787,"政":2182,"教":704,"文":522,"方":-856,"日":1798,"時":1829,"最":845,"月":-9066,"木":-485,"来":-442,"校":-360,"業":-1043,"氏":5388,"民":-2716,"気":-910,"沢":-939,"済":-543,"物":-735,"率":672,"球":-1267,"生":-1286,"産":-1101,"田":-2900,"町":1826,"的":2586,"目":922,"省":-3485,"県":2997,"空":-867,"立":-2112,"第":788,"米":2937,"系":786,"約":2171,"経":1146,"統":-1169,"総":940,"線":-994,"署":749,"者":2145,"能":-730,"般":-852,"行":-792,"規":792,"警":-1184,"議":-244,"谷":-1000,"賞":730,"車":-1481,"軍":1158,"輪":-1433,"込":-3370,"近":929,"道":-1291,"選":2596,"郎":-4866,"都":1192,"野":-1100,"銀":-2213,"長":357,"間":-2344,"院":-2297,"際":-2604,"電":-878,"領":-1659,"題":-792,"館":-1984,"首":1749,"高":2120,"「":1895,"」":3798,"・":-4371,"ッ":-724,"ー":-11870,"カ":2145,"コ":1789,"セ":1287,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637};
|
||||
this.UW5__ = {",":465,".":-299,"1":-514,"E2":-32768,"]":-2762,"、":465,"。":-299,"「":363,"あ":1655,"い":331,"う":-503,"え":1199,"お":527,"か":647,"が":-421,"き":1624,"ぎ":1971,"く":312,"げ":-983,"さ":-1537,"し":-1371,"す":-852,"だ":-1186,"ち":1093,"っ":52,"つ":921,"て":-18,"で":-850,"と":-127,"ど":1682,"な":-787,"に":-1224,"の":-635,"は":-578,"べ":1001,"み":502,"め":865,"ゃ":3350,"ょ":854,"り":-208,"る":429,"れ":504,"わ":419,"を":-1264,"ん":327,"イ":241,"ル":451,"ン":-343,"中":-871,"京":722,"会":-1153,"党":-654,"務":3519,"区":-901,"告":848,"員":2104,"大":-1296,"学":-548,"定":1785,"嵐":-1304,"市":-2991,"席":921,"年":1763,"思":872,"所":-814,"挙":1618,"新":-1682,"日":218,"月":-4353,"査":932,"格":1356,"機":-1508,"氏":-1347,"田":240,"町":-3912,"的":-3149,"相":1319,"省":-1052,"県":-4003,"研":-997,"社":-278,"空":-813,"統":1955,"者":-2233,"表":663,"語":-1073,"議":1219,"選":-1018,"郎":-368,"長":786,"間":1191,"題":2368,"館":-689,"1":-514,"E2":-32768,"「":363,"イ":241,"ル":451,"ン":-343};
|
||||
this.UW6__ = {",":227,".":808,"1":-270,"E1":306,"、":227,"。":808,"あ":-307,"う":189,"か":241,"が":-73,"く":-121,"こ":-200,"じ":1782,"す":383,"た":-428,"っ":573,"て":-1014,"で":101,"と":-105,"な":-253,"に":-149,"の":-417,"は":-236,"も":-206,"り":187,"る":-135,"を":195,"ル":-673,"ン":-496,"一":-277,"中":201,"件":-800,"会":624,"前":302,"区":1792,"員":-1212,"委":798,"学":-960,"市":887,"広":-695,"後":535,"業":-697,"相":753,"社":-507,"福":974,"空":-822,"者":1811,"連":463,"郎":1082,"1":-270,"E1":306,"ル":-673,"ン":-496};
|
||||

      return this;
    }
    TinySegmenter.prototype.ctype_ = function(str) {
      for (var i in this.chartype_) {
        if (str.match(this.chartype_[i][0])) {
          return this.chartype_[i][1];
        }
      }
      return "O";
    }

    TinySegmenter.prototype.ts_ = function(v) {
      if (v) { return v; }
      return 0;
    }

    TinySegmenter.prototype.segment = function(input) {
      if (input == null || input == undefined || input == "") {
        return [];
      }
      var result = [];
      var seg = ["B3","B2","B1"];
      var ctype = ["O","O","O"];
      var o = input.split("");
      for (i = 0; i < o.length; ++i) {
        seg.push(o[i]);
        ctype.push(this.ctype_(o[i]))
      }
      seg.push("E1");
      seg.push("E2");
      seg.push("E3");
      ctype.push("O");
      ctype.push("O");
      ctype.push("O");
      var word = seg[3];
      var p1 = "U";
      var p2 = "U";
      var p3 = "U";
      for (var i = 4; i < seg.length - 3; ++i) {
        var score = this.BIAS__;
        var w1 = seg[i-3];
        var w2 = seg[i-2];
        var w3 = seg[i-1];
        var w4 = seg[i];
        var w5 = seg[i+1];
        var w6 = seg[i+2];
        var c1 = ctype[i-3];
        var c2 = ctype[i-2];
        var c3 = ctype[i-1];
        var c4 = ctype[i];
        var c5 = ctype[i+1];
        var c6 = ctype[i+2];
        score += this.ts_(this.UP1__[p1]);
        score += this.ts_(this.UP2__[p2]);
        score += this.ts_(this.UP3__[p3]);
        score += this.ts_(this.BP1__[p1 + p2]);
        score += this.ts_(this.BP2__[p2 + p3]);
        score += this.ts_(this.UW1__[w1]);
        score += this.ts_(this.UW2__[w2]);
        score += this.ts_(this.UW3__[w3]);
        score += this.ts_(this.UW4__[w4]);
        score += this.ts_(this.UW5__[w5]);
        score += this.ts_(this.UW6__[w6]);
        score += this.ts_(this.BW1__[w2 + w3]);
        score += this.ts_(this.BW2__[w3 + w4]);
        score += this.ts_(this.BW3__[w4 + w5]);
        score += this.ts_(this.TW1__[w1 + w2 + w3]);
        score += this.ts_(this.TW2__[w2 + w3 + w4]);
        score += this.ts_(this.TW3__[w3 + w4 + w5]);
        score += this.ts_(this.TW4__[w4 + w5 + w6]);
        score += this.ts_(this.UC1__[c1]);
        score += this.ts_(this.UC2__[c2]);
        score += this.ts_(this.UC3__[c3]);
        score += this.ts_(this.UC4__[c4]);
        score += this.ts_(this.UC5__[c5]);
        score += this.ts_(this.UC6__[c6]);
        score += this.ts_(this.BC1__[c2 + c3]);
        score += this.ts_(this.BC2__[c3 + c4]);
        score += this.ts_(this.BC3__[c4 + c5]);
        score += this.ts_(this.TC1__[c1 + c2 + c3]);
        score += this.ts_(this.TC2__[c2 + c3 + c4]);
        score += this.ts_(this.TC3__[c3 + c4 + c5]);
        score += this.ts_(this.TC4__[c4 + c5 + c6]);
        // score += this.ts_(this.TC5__[c4 + c5 + c6]);
        score += this.ts_(this.UQ1__[p1 + c1]);
        score += this.ts_(this.UQ2__[p2 + c2]);
        score += this.ts_(this.UQ3__[p3 + c3]);
        score += this.ts_(this.BQ1__[p2 + c2 + c3]);
        score += this.ts_(this.BQ2__[p2 + c3 + c4]);
        score += this.ts_(this.BQ3__[p3 + c2 + c3]);
        score += this.ts_(this.BQ4__[p3 + c3 + c4]);
        score += this.ts_(this.TQ1__[p2 + c1 + c2 + c3]);
        score += this.ts_(this.TQ2__[p2 + c2 + c3 + c4]);
        score += this.ts_(this.TQ3__[p3 + c1 + c2 + c3]);
        score += this.ts_(this.TQ4__[p3 + c2 + c3 + c4]);
        var p = "O";
        if (score > 0) {
          result.push(word);
          word = "";
          p = "B";
        }
        p1 = p2;
        p2 = p3;
        p3 = p;
        word += seg[i];
      }
      result.push(word);

      return result;
    }

    lunr.TinySegmenter = TinySegmenter;
  };

}));
6708 assets/javascripts/lunr/wordcut.js (Normal file)
File diff suppressed because one or more lines are too long
42 assets/javascripts/workers/search.2c215733.min.js (vendored, Normal file)
File diff suppressed because one or more lines are too long
7 assets/javascripts/workers/search.2c215733.min.js.map (Normal file)
File diff suppressed because one or more lines are too long
1 assets/stylesheets/main.484c7ddc.min.css (vendored, Normal file)
File diff suppressed because one or more lines are too long
1 assets/stylesheets/main.484c7ddc.min.css.map (Normal file)
File diff suppressed because one or more lines are too long
1 assets/stylesheets/palette.ab4e12ef.min.css (vendored, Normal file)
File diff suppressed because one or more lines are too long
1 assets/stylesheets/palette.ab4e12ef.min.css.map (Normal file)
@@ -0,0 +1 @@
{"version":3,"sources":["src/templates/assets/stylesheets/palette/_scheme.scss","../../../../src/templates/assets/stylesheets/palette.scss","src/templates/assets/stylesheets/palette/_accent.scss","src/templates/assets/stylesheets/palette/_primary.scss","src/templates/assets/stylesheets/utilities/_break.scss"],"names":[],"mappings":"AA2BA,cAGE,6BAME,sDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CACA,mDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CAGA,mDAAA,CACA,gDAAA,CACA,yDAAA,CACA,4DAAA,CAGA,0BAAA,CACA,mCAAA,CAGA,iCAAA,CACA,kCAAA,CACA,mCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,+CAAA,CACA,6DAAA,CACA,gEAAA,CACA,4DAAA,CACA,4DAAA,CACA,6DAAA,CAGA,6CAAA,CAGA,+CAAA,CAGA,uDAAA,CACA,6DAAA,CACA,2DAAA,CAGA,iCAAA,CAGA,yDAAA,CACA,iEAAA,CAGA,mDAAA,CACA,mDAAA,CAGA,qDAAA,CACA,uDAAA,CAGA,8DAAA,CAKA,8DAAA,CAKA,0DAAA,CAzEA,iBCiBF,CD6DE,kHAEE,YC3DJ,CDkFE,yDACE,4BChFJ,CD+EE,2DACE,4BC7EJ,CD4EE,gEACE,4BC1EJ,CDyEE,2DACE,4BCvEJ,CDsEE,yDACE,4BCpEJ,CDmEE,0DACE,4BCjEJ,CDgEE,gEACE,4BC9DJ,CD6DE,0DACE,4BC3DJ,CD0DE,2OACE,4BC/CJ,CDsDA,+FAGE,iCCpDF,CACF,CCjDE,2BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD6CN,CCvDE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDoDN,CC9DE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD2DN,CCrEE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDkEN,CC5EE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDyEN,CCnFE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDgFN,CC1FE,kCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDuFN,CCjGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD8FN,CCxGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDqGN,CC/GE,6BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD4GN,CCtHE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDmHN,CC7HE,4BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD6HN,CCpIE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDoIN,CC3IE,6BACE,yBAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD2IN,CClJE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDkJN,CCzJE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDsJN,CE3JE,4BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwJN,CEnKE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgKN,CE3KE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwKN,CEnLE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgLN,CE3LE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwLN,CEnME,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgMN,CE3ME,mCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwMN,CEnNE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgNN,CE3NE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwNN,CEnOE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgON,CE3OE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwON,CEnPE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFmPN,CE3PE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCF2PN,CEnQE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFmQN,CE3QE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCF2QN,CEnRE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFgRN,CE3RE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFwRN,CEnSE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BF4RN,CE5SE,kCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BFqSN,CEtRE,sEACE,4BFyRJ,CE1RE,+DACE,4BF6RJ,CE9RE,iEACE,4BFiSJ,CElSE,gEACE,4BFqSJ,CEtSE,iEACE,4BFySJ,CEhSA,8BACE,mDAAA,CACA,4DAAA,CACA,0DAAA,CACA,oDAAA,CACA,2DAAA,CAGA,4BFiSF,CE9RE,yCACE,+BFgSJ,CE7RI,kDAEE,0CAAA,CACA,sCAAA,CAFA,mCFiSN,CG7MI,mCD1EA,+CACE,8CF0RJ,CEvRI,qDACE,8CFyRN,CEpRE,iEACE,mCFsRJ,CACF,CGxNI,sCDvDA,uCACE,oCFkRJ,CACF,CEzQA,8BACE,kDAAA,CACA,4DAAA,CACA,wDAAA,CACA,oDAAA,CACA,6DAAA,CAGA,4BF0QF,CEvQE,yCACE,+BFyQJ,CEtQI,kDAEE,0CAAA,CACA,sCAAA,CAFA,mCF0QN,CEnQE,yCACE,6CFqQJ
,CG9NI,0CDhCA,8CACE,gDFiQJ,CACF,CGnOI,0CDvBA,iFACE,6CF6PJ,CACF,CG3PI,sCDKA,uCACE,6CFyPJ,CACF","file":"palette.css"}
@@ -1,35 +0,0 @@
//go:build gonum

package poindexter

import "testing"

// 100k-size benchmarks run only in the gonum-tag job to keep CI time reasonable.

func BenchmarkNearest_Linear_Uniform_100k_2D(b *testing.B) {
	benchNearestBackend(b, 100_000, 2, BackendLinear, true, 0)
}
func BenchmarkNearest_Gonum_Uniform_100k_2D(b *testing.B) {
	benchNearestBackend(b, 100_000, 2, BackendGonum, true, 0)
}

func BenchmarkNearest_Linear_Uniform_100k_4D(b *testing.B) {
	benchNearestBackend(b, 100_000, 4, BackendLinear, true, 0)
}
func BenchmarkNearest_Gonum_Uniform_100k_4D(b *testing.B) {
	benchNearestBackend(b, 100_000, 4, BackendGonum, true, 0)
}

func BenchmarkNearest_Linear_Clustered_100k_2D(b *testing.B) {
	benchNearestBackend(b, 100_000, 2, BackendLinear, false, 3)
}
func BenchmarkNearest_Gonum_Clustered_100k_2D(b *testing.B) {
	benchNearestBackend(b, 100_000, 2, BackendGonum, false, 3)
}

func BenchmarkNearest_Linear_Clustered_100k_4D(b *testing.B) {
	benchNearestBackend(b, 100_000, 4, BackendLinear, false, 3)
}
func BenchmarkNearest_Gonum_Clustered_100k_4D(b *testing.B) {
	benchNearestBackend(b, 100_000, 4, BackendGonum, false, 3)
}
@@ -1,180 +0,0 @@
package poindexter

import (
	"fmt"
	"math/rand"
	"testing"
)

// deterministicRand returns a rand.Rand with a fixed seed for reproducible datasets.
func deterministicRand() *rand.Rand { return rand.New(rand.NewSource(42)) }

func makeUniformPoints(n, dim int) []KDPoint[int] {
	r := deterministicRand()
	pts := make([]KDPoint[int], n)
	for i := 0; i < n; i++ {
		coords := make([]float64, dim)
		for d := 0; d < dim; d++ {
			coords[d] = r.Float64()
		}
		pts[i] = KDPoint[int]{ID: fmt.Sprint(i), Coords: coords, Value: i}
	}
	return pts
}

// makeClusteredPoints creates n points around c clusters with small variance.
func makeClusteredPoints(n, dim, c int) []KDPoint[int] {
	if c <= 0 {
		c = 1
	}
	r := deterministicRand()
	centers := make([][]float64, c)
	for i := 0; i < c; i++ {
		centers[i] = make([]float64, dim)
		for d := 0; d < dim; d++ {
			centers[i][d] = r.Float64()
		}
	}
	pts := make([]KDPoint[int], n)
	for i := 0; i < n; i++ {
		coords := make([]float64, dim)
		cent := centers[r.Intn(c)]
		for d := 0; d < dim; d++ {
			// small gaussian noise around center (Box-Muller)
			u1 := r.Float64()
			u2 := r.Float64()
			z := (rand.NormFloat64()) // uses global; fine for test speed
			_ = u1
			_ = u2
			coords[d] = cent[d] + 0.03*z
			if coords[d] < 0 {
				coords[d] = 0
			} else if coords[d] > 1 {
				coords[d] = 1
			}
		}
		pts[i] = KDPoint[int]{ID: fmt.Sprint(i), Coords: coords, Value: i}
	}
	return pts
}

func benchNearestBackend(b *testing.B, n, dim int, backend KDBackend, uniform bool, clusters int) {
	var pts []KDPoint[int]
	if uniform {
		pts = makeUniformPoints(n, dim)
	} else {
		pts = makeClusteredPoints(n, dim, clusters)
	}
	tr, _ := NewKDTree(pts, WithBackend(backend))
	q := make([]float64, dim)
	for i := range q {
		q[i] = 0.5
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _, _ = tr.Nearest(q)
	}
}

func benchKNNBackend(b *testing.B, n, dim, k int, backend KDBackend, uniform bool, clusters int) {
	var pts []KDPoint[int]
	if uniform {
		pts = makeUniformPoints(n, dim)
	} else {
		pts = makeClusteredPoints(n, dim, clusters)
	}
	tr, _ := NewKDTree(pts, WithBackend(backend))
	q := make([]float64, dim)
	for i := range q {
		q[i] = 0.5
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = tr.KNearest(q, k)
	}
}

func benchRadiusBackend(b *testing.B, n, dim int, r float64, backend KDBackend, uniform bool, clusters int) {
	var pts []KDPoint[int]
	if uniform {
		pts = makeUniformPoints(n, dim)
	} else {
		pts = makeClusteredPoints(n, dim, clusters)
	}
	tr, _ := NewKDTree(pts, WithBackend(backend))
	q := make([]float64, dim)
	for i := range q {
		q[i] = 0.5
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = tr.Radius(q, r)
	}
}

// Uniform 2D/4D, Linear vs Gonum (opt-in via build tag; falls back to linear if not available)
func BenchmarkNearest_Linear_Uniform_1k_2D(b *testing.B) {
	benchNearestBackend(b, 1_000, 2, BackendLinear, true, 0)
}
func BenchmarkNearest_Gonum_Uniform_1k_2D(b *testing.B) {
	benchNearestBackend(b, 1_000, 2, BackendGonum, true, 0)
}
func BenchmarkNearest_Linear_Uniform_10k_2D(b *testing.B) {
	benchNearestBackend(b, 10_000, 2, BackendLinear, true, 0)
}
func BenchmarkNearest_Gonum_Uniform_10k_2D(b *testing.B) {
	benchNearestBackend(b, 10_000, 2, BackendGonum, true, 0)
}

func BenchmarkNearest_Linear_Uniform_1k_4D(b *testing.B) {
	benchNearestBackend(b, 1_000, 4, BackendLinear, true, 0)
}
func BenchmarkNearest_Gonum_Uniform_1k_4D(b *testing.B) {
	benchNearestBackend(b, 1_000, 4, BackendGonum, true, 0)
}
func BenchmarkNearest_Linear_Uniform_10k_4D(b *testing.B) {
	benchNearestBackend(b, 10_000, 4, BackendLinear, true, 0)
}
func BenchmarkNearest_Gonum_Uniform_10k_4D(b *testing.B) {
	benchNearestBackend(b, 10_000, 4, BackendGonum, true, 0)
}

// Clustered 2D/4D (3 clusters)
func BenchmarkNearest_Linear_Clustered_1k_2D(b *testing.B) {
	benchNearestBackend(b, 1_000, 2, BackendLinear, false, 3)
}
func BenchmarkNearest_Gonum_Clustered_1k_2D(b *testing.B) {
	benchNearestBackend(b, 1_000, 2, BackendGonum, false, 3)
}
func BenchmarkNearest_Linear_Clustered_10k_2D(b *testing.B) {
	benchNearestBackend(b, 10_000, 2, BackendLinear, false, 3)
}
func BenchmarkNearest_Gonum_Clustered_10k_2D(b *testing.B) {
	benchNearestBackend(b, 10_000, 2, BackendGonum, false, 3)
}

func BenchmarkKNN10_Linear_Uniform_10k_2D(b *testing.B) {
	benchKNNBackend(b, 10_000, 2, 10, BackendLinear, true, 0)
}
func BenchmarkKNN10_Gonum_Uniform_10k_2D(b *testing.B) {
	benchKNNBackend(b, 10_000, 2, 10, BackendGonum, true, 0)
}
func BenchmarkKNN10_Linear_Clustered_10k_2D(b *testing.B) {
	benchKNNBackend(b, 10_000, 2, 10, BackendLinear, false, 3)
}
func BenchmarkKNN10_Gonum_Clustered_10k_2D(b *testing.B) {
	benchKNNBackend(b, 10_000, 2, 10, BackendGonum, false, 3)
}

func BenchmarkRadiusMid_Linear_Uniform_10k_2D(b *testing.B) {
	benchRadiusBackend(b, 10_000, 2, 0.5, BackendLinear, true, 0)
}
func BenchmarkRadiusMid_Gonum_Uniform_10k_2D(b *testing.B) {
	benchRadiusBackend(b, 10_000, 2, 0.5, BackendGonum, true, 0)
}
func BenchmarkRadiusMid_Linear_Clustered_10k_2D(b *testing.B) {
	benchRadiusBackend(b, 10_000, 2, 0.5, BackendLinear, false, 3)
}
func BenchmarkRadiusMid_Gonum_Clustered_10k_2D(b *testing.B) {
	benchRadiusBackend(b, 10_000, 2, 0.5, BackendGonum, false, 3)
}
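The benchmarks above pin down the KD-tree API this package exposes: KDPoint[int] values with ID, Coords and Value fields, a NewKDTree constructor that takes an optional WithBackend(BackendLinear | BackendGonum) option, and Nearest, KNearest and Radius queries. A minimal usage sketch follows, assuming only the names and call shapes visible in those benchmarks; the semantics of each return value are not shown in the diff and are marked as assumptions in the comments.

package poindexter

import "fmt"

// exampleKDTreeUsage is a hypothetical walkthrough of the API surface the
// benchmarks above rely on. The type and function names come from that code;
// the meaning of each return value is an assumption and is noted as such.
func exampleKDTreeUsage() {
	pts := []KDPoint[int]{
		{ID: "a", Coords: []float64{0.1, 0.2}, Value: 1},
		{ID: "b", Coords: []float64{0.8, 0.9}, Value: 2},
		{ID: "c", Coords: []float64{0.5, 0.5}, Value: 3},
	}

	// WithBackend selects the search backend; BackendGonum is opt-in via the
	// gonum build tag and falls back to linear search when unavailable.
	tr, _ := NewKDTree(pts, WithBackend(BackendLinear)) // second value discarded, as in the benchmarks

	q := []float64{0.4, 0.6}
	nearest, dist, ok := tr.Nearest(q) // assumed: best point, its distance, and a found flag
	fmt.Println(nearest, dist, ok)

	neighbours, _ := tr.KNearest(q, 2) // assumed: the k closest points
	within, _ := tr.Radius(q, 0.25)    // assumed: all points within the given radius
	fmt.Println(neighbours, within)
}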
@@ -1,69 +0,0 @@
package poindexter

import (
	"fmt"
	"math/rand"
	"testing"
)

func makePoints(n, dim int) []KDPoint[int] {
	pts := make([]KDPoint[int], n)
	for i := 0; i < n; i++ {
		coords := make([]float64, dim)
		for d := 0; d < dim; d++ {
			coords[d] = rand.Float64()
		}
		pts[i] = KDPoint[int]{ID: fmt.Sprint(i), Coords: coords, Value: i}
	}
	return pts
}

func benchNearest(b *testing.B, n, dim int) {
	pts := makePoints(n, dim)
	tr, _ := NewKDTree(pts)
	q := make([]float64, dim)
	for i := range q {
		q[i] = 0.5
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _, _ = tr.Nearest(q)
	}
}

func benchKNearest(b *testing.B, n, dim, k int) {
	pts := makePoints(n, dim)
	tr, _ := NewKDTree(pts)
	q := make([]float64, dim)
	for i := range q {
		q[i] = 0.5
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = tr.KNearest(q, k)
	}
}

func benchRadius(b *testing.B, n, dim int, r float64) {
	pts := makePoints(n, dim)
	tr, _ := NewKDTree(pts)
	q := make([]float64, dim)
	for i := range q {
		q[i] = 0.5
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = tr.Radius(q, r)
	}
}

func BenchmarkNearest_1k_2D(b *testing.B)  { benchNearest(b, 1_000, 2) }
func BenchmarkNearest_10k_2D(b *testing.B) { benchNearest(b, 10_000, 2) }
func BenchmarkNearest_1k_4D(b *testing.B)  { benchNearest(b, 1_000, 4) }
func BenchmarkNearest_10k_4D(b *testing.B) { benchNearest(b, 10_000, 4) }

func BenchmarkKNearest10_1k_2D(b *testing.B)  { benchKNearest(b, 1_000, 2, 10) }
func BenchmarkKNearest10_10k_2D(b *testing.B) { benchKNearest(b, 10_000, 2, 10) }

func BenchmarkRadiusMid_1k_2D(b *testing.B)  { benchRadius(b, 1_000, 2, 0.5) }
func BenchmarkRadiusMid_10k_2D(b *testing.B) { benchRadius(b, 10_000, 2, 0.5) }
1057 dht-best-ping/index.html (Normal file)
File diff suppressed because it is too large
1006 dns_tools.go
File diff suppressed because it is too large
@@ -1,732 +0,0 @@
package poindexter

import (
	"strings"
	"testing"
)

// ============================================================================
// External Tool Links Tests
// ============================================================================

func TestGetExternalToolLinks(t *testing.T) {
	links := GetExternalToolLinks("example.com")

	if links.Target != "example.com" {
		t.Errorf("expected target=example.com, got %s", links.Target)
	}
	if links.Type != "domain" {
		t.Errorf("expected type=domain, got %s", links.Type)
	}

	// Check MXToolbox links
	if !strings.Contains(links.MXToolboxDNS, "mxtoolbox.com") {
		t.Error("MXToolboxDNS should contain mxtoolbox.com")
	}
	if !strings.Contains(links.MXToolboxDNS, "example.com") {
		t.Error("MXToolboxDNS should contain the domain")
	}

	if !strings.Contains(links.MXToolboxMX, "mxtoolbox.com") {
		t.Error("MXToolboxMX should contain mxtoolbox.com")
	}

	if !strings.Contains(links.MXToolboxSPF, "spf") {
		t.Error("MXToolboxSPF should contain 'spf'")
	}

	if !strings.Contains(links.MXToolboxDMARC, "dmarc") {
		t.Error("MXToolboxDMARC should contain 'dmarc'")
	}

	// Check DNSChecker links
	if !strings.Contains(links.DNSCheckerDNS, "dnschecker.org") {
		t.Error("DNSCheckerDNS should contain dnschecker.org")
	}

	// Check other tools
	if !strings.Contains(links.WhoIs, "who.is") {
		t.Error("WhoIs should contain who.is")
	}

	if !strings.Contains(links.SSLLabs, "ssllabs.com") {
		t.Error("SSLLabs should contain ssllabs.com")
	}

	if !strings.Contains(links.VirusTotal, "virustotal.com") {
		t.Error("VirusTotal should contain virustotal.com")
	}
}

func TestGetExternalToolLinksIP(t *testing.T) {
	links := GetExternalToolLinksIP("8.8.8.8")

	if links.Target != "8.8.8.8" {
		t.Errorf("expected target=8.8.8.8, got %s", links.Target)
	}
	if links.Type != "ip" {
		t.Errorf("expected type=ip, got %s", links.Type)
	}

	// Check IP-specific links
	if !strings.Contains(links.IPInfo, "ipinfo.io") {
		t.Error("IPInfo should contain ipinfo.io")
	}
	if !strings.Contains(links.IPInfo, "8.8.8.8") {
		t.Error("IPInfo should contain the IP address")
	}

	if !strings.Contains(links.AbuseIPDB, "abuseipdb.com") {
		t.Error("AbuseIPDB should contain abuseipdb.com")
	}

	if !strings.Contains(links.Shodan, "shodan.io") {
		t.Error("Shodan should contain shodan.io")
	}

	if !strings.Contains(links.MXToolboxBlacklist, "blacklist") {
		t.Error("MXToolboxBlacklist should contain 'blacklist'")
	}
}

func TestGetExternalToolLinksEmail(t *testing.T) {
	// Test with email address
	links := GetExternalToolLinksEmail("test@example.com")

	if links.Target != "test@example.com" {
		t.Errorf("expected target=test@example.com, got %s", links.Target)
	}
	if links.Type != "email" {
		t.Errorf("expected type=email, got %s", links.Type)
	}

	// Email tools should use the domain
	if !strings.Contains(links.MXToolboxMX, "example.com") {
		t.Error("MXToolboxMX should contain the domain from email")
	}

	if !strings.Contains(links.MXToolboxSPF, "spf") {
		t.Error("MXToolboxSPF should contain 'spf'")
	}

	if !strings.Contains(links.MXToolboxDMARC, "dmarc") {
		t.Error("MXToolboxDMARC should contain 'dmarc'")
	}

	// Test with just domain
	links2 := GetExternalToolLinksEmail("example.org")
	if links2.Target != "example.org" {
		t.Errorf("expected target=example.org, got %s", links2.Target)
	}
}

func TestGetExternalToolLinksSpecialChars(t *testing.T) {
	// Test URL encoding
	links := GetExternalToolLinks("test-domain.example.com")

	if !strings.Contains(links.MXToolboxDNS, "test-domain.example.com") {
		t.Error("Should handle hyphens in domain")
	}
}
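These tests pin down the shape of the external-tool link helpers: GetExternalToolLinks, GetExternalToolLinksIP and GetExternalToolLinksEmail each return a value with a Target, a Type ("domain", "ip" or "email") and a set of string URL fields (MXToolboxDNS, DNSCheckerDNS, WhoIs, SSLLabs, VirusTotal, IPInfo, AbuseIPDB, Shodan, and so on). A minimal caller sketch follows, assuming only the functions and fields the tests assert on; the struct type name and any further fields are not shown in this diff. The remainder of the deleted test file, covering DNS record types, RDAP parsing and record structures, continues below.

package poindexter

import "fmt"

// exampleExternalToolLinks is a hypothetical illustration assembled from the
// fields asserted in the tests above; anything beyond those assertions is an assumption.
func exampleExternalToolLinks() {
	links := GetExternalToolLinks("example.com")
	fmt.Println(links.Type, links.Target) // "domain", "example.com"
	fmt.Println(links.MXToolboxDNS)       // mxtoolbox.com DNS lookup URL containing the domain
	fmt.Println(links.SSLLabs)            // ssllabs.com report link
	fmt.Println(links.VirusTotal)         // virustotal.com report link

	ipLinks := GetExternalToolLinksIP("8.8.8.8")
	fmt.Println(ipLinks.IPInfo, ipLinks.AbuseIPDB, ipLinks.Shodan)

	mailLinks := GetExternalToolLinksEmail("test@example.com")
	fmt.Println(mailLinks.MXToolboxSPF, mailLinks.MXToolboxDMARC)
}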
|
||||
// ============================================================================
|
||||
// DNS Lookup Tests (Unit tests for structure, not network)
|
||||
// ============================================================================
|
||||
|
||||
func TestDNSRecordTypes(t *testing.T) {
|
||||
types := []DNSRecordType{
|
||||
DNSRecordA,
|
||||
DNSRecordAAAA,
|
||||
DNSRecordMX,
|
||||
DNSRecordTXT,
|
||||
DNSRecordNS,
|
||||
DNSRecordCNAME,
|
||||
DNSRecordSOA,
|
||||
DNSRecordPTR,
|
||||
DNSRecordSRV,
|
||||
DNSRecordCAA,
|
||||
}
|
||||
|
||||
expected := []string{"A", "AAAA", "MX", "TXT", "NS", "CNAME", "SOA", "PTR", "SRV", "CAA"}
|
||||
|
||||
for i, typ := range types {
|
||||
if string(typ) != expected[i] {
|
||||
t.Errorf("expected type %s, got %s", expected[i], typ)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDNSRecordTypesExtended(t *testing.T) {
|
||||
// Test all ClouDNS record types are defined
|
||||
types := []DNSRecordType{
|
||||
DNSRecordALIAS,
|
||||
DNSRecordRP,
|
||||
DNSRecordSSHFP,
|
||||
DNSRecordTLSA,
|
||||
DNSRecordDS,
|
||||
DNSRecordDNSKEY,
|
||||
DNSRecordNAPTR,
|
||||
DNSRecordLOC,
|
||||
DNSRecordHINFO,
|
||||
DNSRecordCERT,
|
||||
DNSRecordSMIMEA,
|
||||
DNSRecordWR,
|
||||
DNSRecordSPF,
|
||||
}
|
||||
|
||||
expected := []string{"ALIAS", "RP", "SSHFP", "TLSA", "DS", "DNSKEY", "NAPTR", "LOC", "HINFO", "CERT", "SMIMEA", "WR", "SPF"}
|
||||
|
||||
for i, typ := range types {
|
||||
if string(typ) != expected[i] {
|
||||
t.Errorf("expected type %s, got %s", expected[i], typ)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetDNSRecordTypeInfo(t *testing.T) {
|
||||
info := GetDNSRecordTypeInfo()
|
||||
|
||||
if len(info) == 0 {
|
||||
t.Error("GetDNSRecordTypeInfo should return non-empty list")
|
||||
}
|
||||
|
||||
// Check that common types exist
|
||||
commonFound := 0
|
||||
for _, r := range info {
|
||||
if r.Common {
|
||||
commonFound++
|
||||
}
|
||||
// Each entry should have type, name, and description
|
||||
if r.Type == "" {
|
||||
t.Error("Record type should not be empty")
|
||||
}
|
||||
if r.Name == "" {
|
||||
t.Error("Record name should not be empty")
|
||||
}
|
||||
if r.Description == "" {
|
||||
t.Error("Record description should not be empty")
|
||||
}
|
||||
}
|
||||
|
||||
if commonFound < 10 {
|
||||
t.Errorf("Expected at least 10 common record types, got %d", commonFound)
|
||||
}
|
||||
|
||||
// Check for specific types
|
||||
typeMap := make(map[DNSRecordType]DNSRecordTypeInfo)
|
||||
for _, r := range info {
|
||||
typeMap[r.Type] = r
|
||||
}
|
||||
|
||||
if _, ok := typeMap[DNSRecordA]; !ok {
|
||||
t.Error("A record type should be in info")
|
||||
}
|
||||
if _, ok := typeMap[DNSRecordALIAS]; !ok {
|
||||
t.Error("ALIAS record type should be in info")
|
||||
}
|
||||
if _, ok := typeMap[DNSRecordTLSA]; !ok {
|
||||
t.Error("TLSA record type should be in info")
|
||||
}
|
||||
if _, ok := typeMap[DNSRecordWR]; !ok {
|
||||
t.Error("WR (Web Redirect) record type should be in info")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetCommonDNSRecordTypes(t *testing.T) {
|
||||
types := GetCommonDNSRecordTypes()
|
||||
|
||||
if len(types) == 0 {
|
||||
t.Error("GetCommonDNSRecordTypes should return non-empty list")
|
||||
}
|
||||
|
||||
// Check that standard types are present
|
||||
typeSet := make(map[DNSRecordType]bool)
|
||||
for _, typ := range types {
|
||||
typeSet[typ] = true
|
||||
}
|
||||
|
||||
if !typeSet[DNSRecordA] {
|
||||
t.Error("A record should be in common types")
|
||||
}
|
||||
if !typeSet[DNSRecordAAAA] {
|
||||
t.Error("AAAA record should be in common types")
|
||||
}
|
||||
if !typeSet[DNSRecordMX] {
|
||||
t.Error("MX record should be in common types")
|
||||
}
|
||||
if !typeSet[DNSRecordTXT] {
|
||||
t.Error("TXT record should be in common types")
|
||||
}
|
||||
if !typeSet[DNSRecordALIAS] {
|
||||
t.Error("ALIAS record should be in common types")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetAllDNSRecordTypes(t *testing.T) {
|
||||
types := GetAllDNSRecordTypes()
|
||||
|
||||
if len(types) < 20 {
|
||||
t.Errorf("GetAllDNSRecordTypes should return at least 20 types, got %d", len(types))
|
||||
}
|
||||
|
||||
// Check for ClouDNS-specific types
|
||||
typeSet := make(map[DNSRecordType]bool)
|
||||
for _, typ := range types {
|
||||
typeSet[typ] = true
|
||||
}
|
||||
|
||||
if !typeSet[DNSRecordWR] {
|
||||
t.Error("WR (Web Redirect) should be in all types")
|
||||
}
|
||||
if !typeSet[DNSRecordNAPTR] {
|
||||
t.Error("NAPTR should be in all types")
|
||||
}
|
||||
if !typeSet[DNSRecordDS] {
|
||||
t.Error("DS should be in all types")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDNSLookupResultStructure(t *testing.T) {
|
||||
result := DNSLookupResult{
|
||||
Domain: "example.com",
|
||||
QueryType: "A",
|
||||
Records: []DNSRecord{
|
||||
{Type: DNSRecordA, Name: "example.com", Value: "93.184.216.34"},
|
||||
},
|
||||
LookupTimeMs: 50,
|
||||
}
|
||||
|
||||
if result.Domain != "example.com" {
|
||||
t.Error("Domain should be set")
|
||||
}
|
||||
if len(result.Records) != 1 {
|
||||
t.Error("Should have 1 record")
|
||||
}
|
||||
if result.Records[0].Type != DNSRecordA {
|
||||
t.Error("Record type should be A")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompleteDNSLookupStructure(t *testing.T) {
|
||||
result := CompleteDNSLookup{
|
||||
Domain: "example.com",
|
||||
A: []string{"93.184.216.34"},
|
||||
AAAA: []string{"2606:2800:220:1:248:1893:25c8:1946"},
|
||||
MX: []MXRecord{
|
||||
{Host: "mail.example.com", Priority: 10},
|
||||
},
|
||||
NS: []string{"ns1.example.com", "ns2.example.com"},
|
||||
TXT: []string{"v=spf1 include:_spf.example.com ~all"},
|
||||
}
|
||||
|
||||
if result.Domain != "example.com" {
|
||||
t.Error("Domain should be set")
|
||||
}
|
||||
if len(result.A) != 1 {
|
||||
t.Error("Should have 1 A record")
|
||||
}
|
||||
if len(result.AAAA) != 1 {
|
||||
t.Error("Should have 1 AAAA record")
|
||||
}
|
||||
if len(result.MX) != 1 {
|
||||
t.Error("Should have 1 MX record")
|
||||
}
|
||||
if result.MX[0].Priority != 10 {
|
||||
t.Error("MX priority should be 10")
|
||||
}
|
||||
if len(result.NS) != 2 {
|
||||
t.Error("Should have 2 NS records")
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// RDAP Tests (Unit tests for structure, not network)
|
||||
// ============================================================================
|
||||
|
||||
func TestRDAPResponseStructure(t *testing.T) {
|
||||
resp := RDAPResponse{
|
||||
LDHName: "example.com",
|
||||
Status: []string{"active", "client transfer prohibited"},
|
||||
Events: []RDAPEvent{
|
||||
{EventAction: "registration", EventDate: "2020-01-01T00:00:00Z"},
|
||||
{EventAction: "expiration", EventDate: "2025-01-01T00:00:00Z"},
|
||||
},
|
||||
Entities: []RDAPEntity{
|
||||
{Handle: "REGISTRAR-1", Roles: []string{"registrar"}},
|
||||
},
|
||||
Nameservers: []RDAPNs{
|
||||
{LDHName: "ns1.example.com"},
|
||||
{LDHName: "ns2.example.com"},
|
||||
},
|
||||
}
|
||||
|
||||
if resp.LDHName != "example.com" {
|
||||
t.Error("LDHName should be set")
|
||||
}
|
||||
if len(resp.Status) != 2 {
|
||||
t.Error("Should have 2 status values")
|
||||
}
|
||||
if len(resp.Events) != 2 {
|
||||
t.Error("Should have 2 events")
|
||||
}
|
||||
if resp.Events[0].EventAction != "registration" {
|
||||
t.Error("First event should be registration")
|
||||
}
|
||||
if len(resp.Nameservers) != 2 {
|
||||
t.Error("Should have 2 nameservers")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseRDAPResponse(t *testing.T) {
|
||||
resp := RDAPResponse{
|
||||
LDHName: "example.com",
|
||||
Status: []string{"active", "dnssecSigned"},
|
||||
Events: []RDAPEvent{
|
||||
{EventAction: "registration", EventDate: "2020-01-01T00:00:00Z"},
|
||||
{EventAction: "expiration", EventDate: "2025-01-01T00:00:00Z"},
|
||||
{EventAction: "last changed", EventDate: "2024-06-15T00:00:00Z"},
|
||||
},
|
||||
Entities: []RDAPEntity{
|
||||
{Handle: "REGISTRAR-123", Roles: []string{"registrar"}},
|
||||
},
|
||||
Nameservers: []RDAPNs{
|
||||
{LDHName: "ns1.example.com"},
|
||||
{LDHName: "ns2.example.com"},
|
||||
},
|
||||
}
|
||||
|
||||
info := ParseRDAPResponse(resp)
|
||||
|
||||
if info.Domain != "example.com" {
|
||||
t.Errorf("expected domain=example.com, got %s", info.Domain)
|
||||
}
|
||||
if info.RegistrationDate != "2020-01-01T00:00:00Z" {
|
||||
t.Errorf("expected registration date, got %s", info.RegistrationDate)
|
||||
}
|
||||
if info.ExpirationDate != "2025-01-01T00:00:00Z" {
|
||||
t.Errorf("expected expiration date, got %s", info.ExpirationDate)
|
||||
}
|
||||
if info.UpdatedDate != "2024-06-15T00:00:00Z" {
|
||||
t.Errorf("expected updated date, got %s", info.UpdatedDate)
|
||||
}
|
||||
if info.Registrar != "REGISTRAR-123" {
|
||||
t.Errorf("expected registrar, got %s", info.Registrar)
|
||||
}
|
||||
if len(info.Nameservers) != 2 {
|
||||
t.Error("Should have 2 nameservers")
|
||||
}
|
||||
if !info.DNSSEC {
|
||||
t.Error("DNSSEC should be true (detected from status)")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseRDAPResponseEmpty(t *testing.T) {
|
||||
resp := RDAPResponse{
|
||||
LDHName: "test.com",
|
||||
}
|
||||
|
||||
info := ParseRDAPResponse(resp)
|
||||
|
||||
if info.Domain != "test.com" {
|
||||
t.Error("Domain should be set even with minimal response")
|
||||
}
|
||||
if info.DNSSEC {
|
||||
t.Error("DNSSEC should be false with no status")
|
||||
}
|
||||
if len(info.Nameservers) != 0 {
|
||||
t.Error("Nameservers should be empty")
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// RDAP Server Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestRDAPServers(t *testing.T) {
|
||||
// Check that we have servers for common TLDs
|
||||
commonTLDs := []string{"com", "net", "org", "io"}
|
||||
for _, tld := range commonTLDs {
|
||||
if _, ok := rdapServers[tld]; !ok {
|
||||
t.Errorf("missing RDAP server for TLD: %s", tld)
|
||||
}
|
||||
}
|
||||
|
||||
// Check RIRs
|
||||
rirs := []string{"arin", "ripe", "apnic", "afrinic", "lacnic"}
|
||||
for _, rir := range rirs {
|
||||
if _, ok := rdapServers[rir]; !ok {
|
||||
t.Errorf("missing RDAP server for RIR: %s", rir)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// MX Record Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestMXRecordStructure(t *testing.T) {
|
||||
mx := MXRecord{
|
||||
Host: "mail.example.com",
|
||||
Priority: 10,
|
||||
}
|
||||
|
||||
if mx.Host != "mail.example.com" {
|
||||
t.Error("Host should be set")
|
||||
}
|
||||
if mx.Priority != 10 {
|
||||
t.Error("Priority should be 10")
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// SRV Record Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestSRVRecordStructure(t *testing.T) {
|
||||
srv := SRVRecord{
|
||||
Target: "sipserver.example.com",
|
||||
Port: 5060,
|
||||
Priority: 10,
|
||||
Weight: 100,
|
||||
}
|
||||
|
||||
if srv.Target != "sipserver.example.com" {
|
||||
t.Error("Target should be set")
|
||||
}
|
||||
if srv.Port != 5060 {
|
||||
t.Error("Port should be 5060")
|
||||
}
|
||||
if srv.Priority != 10 {
|
||||
t.Error("Priority should be 10")
|
||||
}
|
||||
if srv.Weight != 100 {
|
||||
t.Error("Weight should be 100")
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// SOA Record Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestSOARecordStructure(t *testing.T) {
|
||||
soa := SOARecord{
|
||||
PrimaryNS: "ns1.example.com",
|
||||
AdminEmail: "admin.example.com",
|
||||
Serial: 2024010101,
|
||||
Refresh: 7200,
|
||||
Retry: 3600,
|
||||
Expire: 1209600,
|
||||
MinTTL: 86400,
|
||||
}
|
||||
|
||||
if soa.PrimaryNS != "ns1.example.com" {
|
||||
t.Error("PrimaryNS should be set")
|
||||
}
|
||||
if soa.Serial != 2024010101 {
|
||||
t.Error("Serial should match")
|
||||
}
|
||||
if soa.Refresh != 7200 {
|
||||
t.Error("Refresh should be 7200")
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Extended Record Type Structure Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestCAARecordStructure(t *testing.T) {
|
||||
caa := CAARecord{
|
||||
Flag: 0,
|
||||
Tag: "issue",
|
||||
Value: "letsencrypt.org",
|
||||
}
|
||||
|
||||
if caa.Tag != "issue" {
|
||||
t.Error("Tag should be 'issue'")
|
||||
}
|
||||
if caa.Value != "letsencrypt.org" {
|
||||
t.Error("Value should be set")
|
||||
}
|
||||
}
|
||||
|
||||
func TestSSHFPRecordStructure(t *testing.T) {
|
||||
sshfp := SSHFPRecord{
|
||||
Algorithm: 4, // Ed25519
|
||||
FPType: 2, // SHA-256
|
||||
Fingerprint: "abc123def456",
|
||||
}
|
||||
|
||||
if sshfp.Algorithm != 4 {
|
||||
t.Error("Algorithm should be 4 (Ed25519)")
|
||||
}
|
||||
if sshfp.FPType != 2 {
|
||||
t.Error("FPType should be 2 (SHA-256)")
|
||||
}
|
||||
}
|
||||
|
||||
func TestTLSARecordStructure(t *testing.T) {
|
||||
tlsa := TLSARecord{
|
||||
Usage: 3, // Domain-issued certificate
|
||||
Selector: 1, // SubjectPublicKeyInfo
|
||||
MatchingType: 1, // SHA-256
|
||||
CertData: "abcd1234",
|
||||
}
|
||||
|
||||
if tlsa.Usage != 3 {
|
||||
t.Error("Usage should be 3")
|
||||
}
|
||||
if tlsa.Selector != 1 {
|
||||
t.Error("Selector should be 1")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDSRecordStructure(t *testing.T) {
|
||||
ds := DSRecord{
|
||||
KeyTag: 12345,
|
||||
Algorithm: 13, // ECDSAP256SHA256
|
||||
DigestType: 2, // SHA-256
|
||||
Digest: "deadbeef",
|
||||
}
|
||||
|
||||
if ds.KeyTag != 12345 {
|
||||
t.Error("KeyTag should be 12345")
|
||||
}
|
||||
if ds.Algorithm != 13 {
|
||||
t.Error("Algorithm should be 13")
|
||||
}
|
||||
}
|
||||
|
||||
func TestNAPTRRecordStructure(t *testing.T) {
|
||||
naptr := NAPTRRecord{
|
||||
Order: 100,
|
||||
Preference: 10,
|
||||
Flags: "U",
|
||||
Service: "E2U+sip",
|
||||
Regexp: "!^.*$!sip:info@example.com!",
|
||||
Replacement: ".",
|
||||
}
|
||||
|
||||
if naptr.Order != 100 {
|
||||
t.Error("Order should be 100")
|
||||
}
|
||||
if naptr.Service != "E2U+sip" {
|
||||
t.Error("Service should be E2U+sip")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRPRecordStructure(t *testing.T) {
|
||||
rp := RPRecord{
|
||||
Mailbox: "admin.example.com",
|
||||
TxtDom: "info.example.com",
|
||||
}
|
||||
|
||||
if rp.Mailbox != "admin.example.com" {
|
||||
t.Error("Mailbox should be set")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLOCRecordStructure(t *testing.T) {
|
||||
loc := LOCRecord{
|
||||
Latitude: 51.5074,
|
||||
Longitude: -0.1278,
|
||||
Altitude: 11,
|
||||
Size: 10,
|
||||
HPrecis: 10,
|
||||
VPrecis: 10,
|
||||
}
|
||||
|
||||
if loc.Latitude < 51.5 || loc.Latitude > 51.6 {
|
||||
t.Error("Latitude should be near 51.5074")
|
||||
}
|
||||
}
|
||||
|
||||
func TestALIASRecordStructure(t *testing.T) {
|
||||
alias := ALIASRecord{
|
||||
Target: "target.example.com",
|
||||
}
|
||||
|
||||
if alias.Target != "target.example.com" {
|
||||
t.Error("Target should be set")
|
||||
}
|
||||
}
|
||||
|
||||
func TestWebRedirectRecordStructure(t *testing.T) {
|
||||
wr := WebRedirectRecord{
|
||||
URL: "https://www.example.com",
|
||||
RedirectType: 301,
|
||||
Frame: false,
|
||||
}
|
||||
|
||||
if wr.URL != "https://www.example.com" {
|
||||
t.Error("URL should be set")
|
||||
}
|
||||
if wr.RedirectType != 301 {
|
||||
t.Error("RedirectType should be 301")
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helper Function Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestIsNoSuchHostError(t *testing.T) {
|
||||
tests := []struct {
|
||||
errStr string
|
||||
expected bool
|
||||
}{
|
||||
{"no such host", true},
|
||||
{"NXDOMAIN", true},
|
||||
{"not found", true},
|
||||
{"connection refused", false},
|
||||
{"timeout", false},
|
||||
{"", false},
|
||||
}
|
||||
|
||||
for _, tc := range tests {
|
||||
var err error
|
||||
if tc.errStr != "" {
|
||||
err = &testError{msg: tc.errStr}
|
||||
}
|
||||
result := isNoSuchHostError(err)
|
||||
if result != tc.expected {
|
||||
t.Errorf("isNoSuchHostError(%q) = %v, want %v", tc.errStr, result, tc.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type testError struct {
|
||||
msg string
|
||||
}
|
||||
|
||||
func (e *testError) Error() string {
|
||||
return e.msg
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// URL Building Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestBuildRDAPURLs(t *testing.T) {
|
||||
// These test the URL structure, not actual lookups
|
||||
|
||||
// Domain URL
|
||||
domain := "example.com"
|
||||
expectedDomainPrefix := "https://rdap.org/domain/"
|
||||
if !strings.HasPrefix("https://rdap.org/domain/"+domain, expectedDomainPrefix) {
|
||||
t.Error("Domain URL format is incorrect")
|
||||
}
|
||||
|
||||
// IP URL
|
||||
ip := "8.8.8.8"
|
||||
expectedIPPrefix := "https://rdap.org/ip/"
|
||||
if !strings.HasPrefix("https://rdap.org/ip/"+ip, expectedIPPrefix) {
|
||||
t.Error("IP URL format is incorrect")
|
||||
}
|
||||
|
||||
// ASN URL
|
||||
asn := "15169"
|
||||
expectedASNPrefix := "https://rdap.org/autnum/"
|
||||
if !strings.HasPrefix("https://rdap.org/autnum/"+asn, expectedASNPrefix) {
|
||||
t.Error("ASN URL format is incorrect")
|
||||
}
|
||||
}
|
||||
7
doc.go
|
|
@ -1,7 +0,0 @@
|
|||
// Package poindexter provides sorting utilities and a KDTree with simple
|
||||
// nearest-neighbour queries. It also includes helper functions to build
|
||||
// normalised, weighted KD points for 2D/3D/4D and arbitrary N‑D use-cases.
|
||||
//
|
||||
// Distance metrics include Euclidean (L2), Manhattan (L1), Chebyshev (L∞), and
|
||||
// Cosine/Weighted-Cosine for vector similarity.
|
||||
package poindexter
|
||||
600
docs/api.md
|
|
@ -1,600 +0,0 @@
|
|||
# API Reference
|
||||
|
||||
Complete API documentation for the Poindexter library.
|
||||
|
||||
## Core Functions
|
||||
|
||||
### Version
|
||||
|
||||
```go
|
||||
func Version() string
|
||||
```
|
||||
|
||||
Returns the current version of the library.
|
||||
|
||||
**Returns:**
|
||||
- `string`: The version string (e.g., "0.3.0")
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
version := poindexter.Version()
|
||||
fmt.Println(version) // Output: 0.3.0
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Hello
|
||||
|
||||
```go
|
||||
func Hello(name string) string
|
||||
```
|
||||
|
||||
Returns a greeting message.
|
||||
|
||||
**Parameters:**
|
||||
- `name` (string): The name to greet. If empty, defaults to "World"
|
||||
|
||||
**Returns:**
|
||||
- `string`: A greeting message
|
||||
|
||||
**Examples:**
|
||||
|
||||
```go
|
||||
// Greet the world
|
||||
message := poindexter.Hello("")
|
||||
fmt.Println(message) // Output: Hello, World!
|
||||
|
||||
// Greet a specific person
|
||||
message = poindexter.Hello("Alice")
|
||||
fmt.Println(message) // Output: Hello, Alice!
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Sorting Functions
|
||||
|
||||
### Basic Sorting
|
||||
|
||||
#### SortInts
|
||||
|
||||
```go
|
||||
func SortInts(data []int)
|
||||
```
|
||||
|
||||
Sorts a slice of integers in ascending order in place.
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
numbers := []int{3, 1, 4, 1, 5, 9}
|
||||
poindexter.SortInts(numbers)
|
||||
fmt.Println(numbers) // Output: [1 1 3 4 5 9]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### SortIntsDescending
|
||||
|
||||
```go
|
||||
func SortIntsDescending(data []int)
|
||||
```
|
||||
|
||||
Sorts a slice of integers in descending order in place.
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
numbers := []int{3, 1, 4, 1, 5, 9}
|
||||
poindexter.SortIntsDescending(numbers)
|
||||
fmt.Println(numbers) // Output: [9 5 4 3 1 1]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### SortStrings
|
||||
|
||||
```go
|
||||
func SortStrings(data []string)
|
||||
```
|
||||
|
||||
Sorts a slice of strings in ascending order in place.
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
words := []string{"banana", "apple", "cherry"}
|
||||
poindexter.SortStrings(words)
|
||||
fmt.Println(words) // Output: [apple banana cherry]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### SortStringsDescending
|
||||
|
||||
```go
|
||||
func SortStringsDescending(data []string)
|
||||
```
|
||||
|
||||
Sorts a slice of strings in descending order in place.
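**Example** (in-place, mirroring the ascending variant above):

```go
words := []string{"banana", "apple", "cherry"}
poindexter.SortStringsDescending(words)
fmt.Println(words) // Output: [cherry banana apple]
```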
|
||||
|
||||
---
|
||||
|
||||
#### SortFloat64s
|
||||
|
||||
```go
|
||||
func SortFloat64s(data []float64)
|
||||
```
|
||||
|
||||
Sorts a slice of float64 values in ascending order in place.
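**Example** (in-place, analogous to `SortInts`):

```go
values := []float64{2.5, 0.5, 1.5}
poindexter.SortFloat64s(values)
fmt.Println(values) // Output: [0.5 1.5 2.5]
```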
|
||||
|
||||
---
|
||||
|
||||
#### SortFloat64sDescending
|
||||
|
||||
```go
|
||||
func SortFloat64sDescending(data []float64)
|
||||
```
|
||||
|
||||
Sorts a slice of float64 values in descending order in place.
|
||||
|
||||
---
|
||||
|
||||
### Advanced Sorting
|
||||
|
||||
#### SortBy
|
||||
|
||||
```go
|
||||
func SortBy[T any](data []T, less func(i, j int) bool)
|
||||
```
|
||||
|
||||
Sorts a slice using a custom comparison function.
|
||||
|
||||
**Parameters:**
|
||||
- `data`: The slice to sort
|
||||
- `less`: A function that returns true if data[i] should come before data[j]
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
type Person struct {
|
||||
Name string
|
||||
Age int
|
||||
}
|
||||
|
||||
people := []Person{
|
||||
{"Alice", 30},
|
||||
{"Bob", 25},
|
||||
{"Charlie", 35},
|
||||
}
|
||||
|
||||
// Sort by age
|
||||
poindexter.SortBy(people, func(i, j int) bool {
|
||||
return people[i].Age < people[j].Age
|
||||
})
|
||||
// Result: [Bob(25) Alice(30) Charlie(35)]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### SortByKey
|
||||
|
||||
```go
|
||||
func SortByKey[T any, K int | float64 | string](data []T, key func(T) K)
|
||||
```
|
||||
|
||||
Sorts a slice by extracting a comparable key from each element in ascending order.
|
||||
|
||||
**Parameters:**
|
||||
- `data`: The slice to sort
|
||||
- `key`: A function that extracts a sortable key from each element
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
type Product struct {
|
||||
Name string
|
||||
Price float64
|
||||
}
|
||||
|
||||
products := []Product{
|
||||
{"Apple", 1.50},
|
||||
{"Banana", 0.75},
|
||||
{"Cherry", 3.00},
|
||||
}
|
||||
|
||||
// Sort by price
|
||||
poindexter.SortByKey(products, func(p Product) float64 {
|
||||
return p.Price
|
||||
})
|
||||
// Result: [Banana(0.75) Apple(1.50) Cherry(3.00)]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### SortByKeyDescending
|
||||
|
||||
```go
|
||||
func SortByKeyDescending[T any, K int | float64 | string](data []T, key func(T) K)
|
||||
```
|
||||
|
||||
Sorts a slice by extracting a comparable key from each element in descending order.
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
type Student struct {
|
||||
Name string
|
||||
Score int
|
||||
}
|
||||
|
||||
students := []Student{
|
||||
{"Alice", 85},
|
||||
{"Bob", 92},
|
||||
{"Charlie", 78},
|
||||
}
|
||||
|
||||
// Sort by score descending
|
||||
poindexter.SortByKeyDescending(students, func(s Student) int {
|
||||
return s.Score
|
||||
})
|
||||
// Result: [Bob(92) Alice(85) Charlie(78)]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Checking if Sorted
|
||||
|
||||
#### IsSorted
|
||||
|
||||
```go
|
||||
func IsSorted(data []int) bool
|
||||
```
|
||||
|
||||
Checks if a slice of integers is sorted in ascending order.
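**Example** (illustrating the boolean result):

```go
fmt.Println(poindexter.IsSorted([]int{1, 2, 3})) // Output: true
fmt.Println(poindexter.IsSorted([]int{3, 1, 2})) // Output: false
```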
|
||||
|
||||
---
|
||||
|
||||
#### IsSortedStrings
|
||||
|
||||
```go
|
||||
func IsSortedStrings(data []string) bool
|
||||
```
|
||||
|
||||
Checks if a slice of strings is sorted in ascending order.
|
||||
|
||||
---
|
||||
|
||||
#### IsSortedFloat64s
|
||||
|
||||
```go
|
||||
func IsSortedFloat64s(data []float64) bool
|
||||
```
|
||||
|
||||
Checks if a slice of float64 values is sorted in ascending order.
|
||||
|
||||
---
|
||||
|
||||
### Binary Search
|
||||
|
||||
#### BinarySearch
|
||||
|
||||
```go
|
||||
func BinarySearch(data []int, target int) int
|
||||
```
|
||||
|
||||
Performs a binary search on a sorted slice of integers.
|
||||
|
||||
**Parameters:**
|
||||
- `data`: A sorted slice of integers
|
||||
- `target`: The value to search for
|
||||
|
||||
**Returns:**
|
||||
- `int`: The index where target is found, or -1 if not found
|
||||
|
||||
**Example:**
|
||||
|
||||
```go
|
||||
numbers := []int{1, 3, 5, 7, 9, 11}
|
||||
index := poindexter.BinarySearch(numbers, 7)
|
||||
fmt.Println(index) // Output: 3
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### BinarySearchStrings
|
||||
|
||||
```go
|
||||
func BinarySearchStrings(data []string, target string) int
|
||||
```
|
||||
|
||||
Performs a binary search on a sorted slice of strings.
|
||||
|
||||
**Parameters:**
|
||||
- `data`: A sorted slice of strings
|
||||
- `target`: The value to search for
|
||||
|
||||
**Returns:**
|
||||
- `int`: The index where target is found, or -1 if not found
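**Example** (the slice must already be sorted, as noted above):

```go
words := []string{"apple", "banana", "cherry"}
fmt.Println(poindexter.BinarySearchStrings(words, "banana")) // Output: 1
fmt.Println(poindexter.BinarySearchStrings(words, "durian")) // Output: -1
```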
|
||||
|
||||
|
||||
## KDTree Helpers
|
||||
|
||||
Poindexter provides helpers to build normalized, weighted KD points from your own records. These functions min–max normalize each axis over your dataset, optionally invert axes where higher is better (to turn them into “lower cost”), and apply per‑axis weights.
|
||||
|
||||
```go
|
||||
func Build2D[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
f1, f2 func(T) float64,
|
||||
weights [2]float64,
|
||||
invert [2]bool,
|
||||
) ([]KDPoint[T], error)
|
||||
|
||||
func Build3D[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
f1, f2, f3 func(T) float64,
|
||||
weights [3]float64,
|
||||
invert [3]bool,
|
||||
) ([]KDPoint[T], error)
|
||||
|
||||
func Build4D[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
f1, f2, f3, f4 func(T) float64,
|
||||
weights [4]float64,
|
||||
invert [4]bool,
|
||||
) ([]KDPoint[T], error)
|
||||
```
|
||||
|
||||
Example (4D over ping, hops, geo, score):
|
||||
|
||||
```go
|
||||
// weights and inversion: flip score so higher is better → lower cost
|
||||
weights := [4]float64{1.0, 0.7, 0.2, 1.2}
|
||||
invert := [4]bool{false, false, false, true}
|
||||
|
||||
pts, err := poindexter.Build4D(
|
||||
peers,
|
||||
func(p Peer) string { return p.ID },
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
func(p Peer) float64 { return p.GeoKM },
|
||||
func(p Peer) float64 { return p.Score },
|
||||
weights, invert,
|
||||
)
|
||||
if err != nil { panic(err) }
|
||||
|
||||
kdt, _ := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
|
||||
best, dist, _ := kdt.Nearest([]float64{0, 0, 0, 0})
|
||||
```
|
||||
|
||||
Notes:
|
||||
- Keep and reuse your normalization parameters (min/max) if you need consistency across updates; otherwise rebuild points when the candidate set changes.
|
||||
- Use `invert` to turn “higher is better” features (like scores) into lower costs for distance calculations.
|
||||
|
||||
|
||||
---
|
||||
|
||||
## KDTree Constructors and Errors
|
||||
|
||||
### NewKDTree
|
||||
|
||||
```go
|
||||
func NewKDTree[T any](pts []KDPoint[T], opts ...KDOption) (*KDTree[T], error)
|
||||
```
|
||||
|
||||
Build a KDTree from the provided points. All points must have the same dimensionality (> 0) and IDs (if provided) must be unique.
|
||||
|
||||
Possible errors:
|
||||
- `ErrEmptyPoints`: no points provided
|
||||
- `ErrZeroDim`: dimension must be at least 1
|
||||
- `ErrDimMismatch`: inconsistent dimensionality among points
|
||||
- `ErrDuplicateID`: duplicate point ID encountered
|
||||
|
||||
### NewKDTreeFromDim
|
||||
|
||||
```go
|
||||
func NewKDTreeFromDim[T any](dim int, opts ...KDOption) (*KDTree[T], error)
|
||||
```
|
||||
|
||||
Construct an empty KDTree with the given dimension, then populate later via `Insert`.
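**Example** (a minimal sketch; the `KDPoint` fields and `Insert` call mirror those shown elsewhere in this reference):

```go
// The element type cannot be inferred from the arguments, so supply it explicitly.
kdt, err := poindexter.NewKDTreeFromDim[string](2)
if err != nil {
	panic(err)
}
_ = kdt.Insert(poindexter.KDPoint[string]{ID: "a", Coords: []float64{0.1, 0.2}, Value: "A"})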
|
||||
|
||||
---
|
||||
|
||||
## KDTree Notes: Complexity, Ties, Concurrency
|
||||
|
||||
- Complexity: current implementation uses O(n) linear scans for queries (`Nearest`, `KNearest`, `Radius`). Inserts are O(1) amortized. Deletes by ID are O(1) using swap-delete (order not preserved).
|
||||
- Tie ordering: when multiple neighbors have the same distance, ordering of ties is arbitrary and not stable between calls.
|
||||
- Concurrency: KDTree is not safe for concurrent mutation. Wrap with a mutex or share immutable snapshots for read-mostly workloads (a minimal wrapper sketch follows below).
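A minimal sketch of the mutex approach. The `SafeTree` wrapper is illustrative only, not part of the library; it assumes `sync` and this package are imported, and that `Insert` returns an error as used in the examples elsewhere in these docs.

```go
// SafeTree guards a KDTree with an RWMutex so reads can proceed concurrently
// while mutations take an exclusive lock.
type SafeTree[T any] struct {
	mu   sync.RWMutex
	tree *poindexter.KDTree[T]
}

func (s *SafeTree[T]) Insert(p poindexter.KDPoint[T]) error {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.tree.Insert(p)
}

func (s *SafeTree[T]) Nearest(q []float64) (poindexter.KDPoint[T], float64, bool) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.tree.Nearest(q)
}
```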
|
||||
|
||||
See runnable examples in the repository `examples/` and the docs pages for 1D DHT and multi-dimensional KDTree usage.
|
||||
|
||||
|
||||
## KDTree Normalization Stats (reuse across updates)
|
||||
|
||||
To keep normalization consistent across dynamic updates, compute per‑axis min/max once and reuse it to build points later. This avoids drift when the candidate set changes.
|
||||
|
||||
### Types
|
||||
|
||||
```go
|
||||
// AxisStats holds the min/max observed for a single axis.
|
||||
type AxisStats struct {
|
||||
Min float64
|
||||
Max float64
|
||||
}
|
||||
|
||||
// NormStats holds per‑axis normalisation stats; for D dims, Stats has length D.
|
||||
type NormStats struct {
|
||||
Stats []AxisStats
|
||||
}
|
||||
```
|
||||
|
||||
### Compute normalization stats
|
||||
|
||||
```go
|
||||
func ComputeNormStats2D[T any](items []T, f1, f2 func(T) float64) NormStats
|
||||
func ComputeNormStats3D[T any](items []T, f1, f2, f3 func(T) float64) NormStats
|
||||
func ComputeNormStats4D[T any](items []T, f1, f2, f3, f4 func(T) float64) NormStats
|
||||
```
|
||||
|
||||
### Build with precomputed stats
|
||||
|
||||
```go
|
||||
func Build2DWithStats[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
f1, f2 func(T) float64,
|
||||
weights [2]float64,
|
||||
invert [2]bool,
|
||||
stats NormStats,
|
||||
) ([]KDPoint[T], error)
|
||||
|
||||
func Build3DWithStats[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
f1, f2, f3 func(T) float64,
|
||||
weights [3]float64,
|
||||
invert [3]bool,
|
||||
stats NormStats,
|
||||
) ([]KDPoint[T], error)
|
||||
|
||||
func Build4DWithStats[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
f1, f2, f3, f4 func(T) float64,
|
||||
weights [4]float64,
|
||||
invert [4]bool,
|
||||
stats NormStats,
|
||||
) ([]KDPoint[T], error)
|
||||
```
|
||||
|
||||
|
||||
#### Example (2D)
|
||||
|
||||
```go
|
||||
// Compute stats once over your baseline set
|
||||
stats := poindexter.ComputeNormStats2D(peers,
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
)
|
||||
|
||||
// Build points using those stats (now or later)
|
||||
pts, _ := poindexter.Build2DWithStats(
|
||||
peers,
|
||||
func(p Peer) string { return p.ID },
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
[2]float64{1,1}, [2]bool{false,false}, stats,
|
||||
)
|
||||
```
|
||||
|
||||
Notes:
|
||||
- If `min==max` for an axis, normalized value is `0` for that axis.
|
||||
- `invert[i]` flips the normalized axis as `1 - n` before applying `weights[i]` (see the worked example below).
|
||||
- These helpers mirror `Build2D/3D/4D`, but use your provided `NormStats` instead of recomputing from the items slice.
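To make the normalize, invert, weight order concrete, here is the arithmetic for a single axis (plain numbers only, not library calls; the min/max/weight values are made up):

```go
// One axis with observed min=10, max=30, weight=2, invert=true.
raw := 30.0
n := (raw - 10.0) / (30.0 - 10.0) // min-max normalized: 1.0
n = 1 - n                         // inverted: 0.0 (higher raw becomes lower cost)
coord := 2.0 * n                  // weighted coordinate: 0.0
_ = coord
```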
|
||||
|
||||
|
||||
---
|
||||
|
||||
## KDTree Normalization Helpers (N‑D)
|
||||
|
||||
Poindexter includes helpers to build KD points from arbitrary dimensions.
|
||||
|
||||
```go
|
||||
func BuildND[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
features []func(T) float64,
|
||||
weights []float64,
|
||||
invert []bool,
|
||||
) ([]KDPoint[T], error)
|
||||
|
||||
// Like BuildND but never returns an error. It performs no validation beyond
|
||||
// basic length checks and propagates NaN/Inf values from feature extractors.
|
||||
func BuildNDNoErr[T any](
|
||||
items []T,
|
||||
id func(T) string,
|
||||
features []func(T) float64,
|
||||
weights []float64,
|
||||
invert []bool,
|
||||
) []KDPoint[T]
|
||||
```
|
||||
|
||||
- `features`: extract raw values per axis.
|
||||
- `weights`: per-axis weights, same length as `features`.
|
||||
- `invert`: if true for an axis, uses `1 - normalized` before weighting (turns “higher is better” into lower cost).
|
||||
- Use `ComputeNormStatsND` + `BuildNDWithStats` to reuse normalization between updates.
|
||||
|
||||
Example:
|
||||
|
||||
```go
|
||||
pts := poindexter.BuildNDNoErr(records,
|
||||
func(r Rec) string { return r.ID },
|
||||
[]func(Rec) float64{
|
||||
func(r Rec) float64 { return r.PingMS },
|
||||
func(r Rec) float64 { return r.Hops },
|
||||
func(r Rec) float64 { return r.GeoKM },
|
||||
func(r Rec) float64 { return r.Score },
|
||||
},
|
||||
[]float64{1.0, 0.7, 0.2, 1.2},
|
||||
[]bool{false, false, false, true},
|
||||
)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## KDTree Backend selection
|
||||
|
||||
Poindexter provides two internal backends for KDTree queries:
|
||||
|
||||
- `linear`: always available; performs O(n) scans for `Nearest`, `KNearest`, and `Radius`.
|
||||
- `gonum`: optimized KD backend compiled when you build with the `gonum` build tag; typically sub-linear on prunable datasets and modest dimensions.
|
||||
|
||||
### Types and options
|
||||
|
||||
```go
|
||||
// KDBackend selects the internal engine used by KDTree.
|
||||
type KDBackend string
|
||||
|
||||
const (
|
||||
BackendLinear KDBackend = "linear"
|
||||
BackendGonum KDBackend = "gonum"
|
||||
)
|
||||
|
||||
// WithBackend selects the internal KDTree backend ("linear" or "gonum").
|
||||
// If the requested backend is unavailable (e.g., missing build tag), the constructor
|
||||
// falls back to the linear backend.
|
||||
func WithBackend(b KDBackend) KDOption
|
||||
```
|
||||
|
||||
### Default selection
|
||||
|
||||
- Default is `linear`.
|
||||
- If you build your project with `-tags=gonum`, the default becomes `gonum`.
|
||||
|
||||
### Usage examples
|
||||
|
||||
```go
|
||||
// Default metric is Euclidean; you can override with WithMetric.
|
||||
pts := []poindexter.KDPoint[string]{
|
||||
{ID: "A", Coords: []float64{0, 0}},
|
||||
{ID: "B", Coords: []float64{1, 0}},
|
||||
}
|
||||
|
||||
// Force Linear (always available)
|
||||
lin, _ := poindexter.NewKDTree(pts, poindexter.WithBackend(poindexter.BackendLinear))
|
||||
_, _, _ = lin.Nearest([]float64{0.9, 0.1})
|
||||
|
||||
// Force Gonum (requires building with: go build -tags=gonum)
|
||||
gon, _ := poindexter.NewKDTree(pts, poindexter.WithBackend(poindexter.BackendGonum))
|
||||
_, _, _ = gon.Nearest([]float64{0.9, 0.1})
|
||||
```
|
||||
|
||||
### Supported metrics in the optimized backend
|
||||
|
||||
- Euclidean (L2), Manhattan (L1), Chebyshev (L∞).
|
||||
- Cosine and Weighted-Cosine currently use the Linear backend.
|
||||
|
||||
See also the Performance guide for measured comparisons and guidance: `docs/perf.md`.
|
||||
|
|
@ -1,116 +0,0 @@
|
|||
# Example: Find the best (lowest‑ping) peer in a DHT table
|
||||
|
||||
This example shows how to model a "made up" DHT routing table and use Poindexter's `KDTree` to quickly find:
|
||||
|
||||
- the single best peer by ping (nearest neighbor)
|
||||
- the top N best peers by ping (k‑nearest neighbors)
|
||||
- all peers under a ping threshold (radius search)
|
||||
|
||||
We keep it simple by mapping each peer to a 1‑dimensional coordinate: its ping in milliseconds. Using 1D means the KDTree's distance is just the absolute difference between pings.
|
||||
|
||||
> Tip: In a real system, you might expand to multiple dimensions (e.g., `[ping_ms, hop_count, geo_distance, score]`) and choose a metric (`L1`, `L2`, or `L∞`) that best matches your routing heuristic. See how to build normalized, weighted multi‑dimensional points with the public helpers `poindexter.Build2D/3D/4D` here: [Multi-Dimensional KDTree (DHT)](kdtree-multidimensional.md).
|
||||
|
||||
---
|
||||
|
||||
## Full example
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
poindexter "github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
// Peer is our DHT peer entry (made up for this example).
|
||||
type Peer struct {
|
||||
Addr string // multiaddr or host:port
|
||||
Ping int // measured ping in milliseconds
|
||||
}
|
||||
|
||||
func main() {
|
||||
// A toy DHT routing table with made-up ping values
|
||||
table := []Peer{
|
||||
{Addr: "peer1.example:4001", Ping: 74},
|
||||
{Addr: "peer2.example:4001", Ping: 52},
|
||||
{Addr: "peer3.example:4001", Ping: 110},
|
||||
{Addr: "peer4.example:4001", Ping: 35},
|
||||
{Addr: "peer5.example:4001", Ping: 60},
|
||||
{Addr: "peer6.example:4001", Ping: 44},
|
||||
}
|
||||
|
||||
// Map peers to KD points in 1D where coordinate = ping (ms).
|
||||
// Use stable string IDs so we can delete/update later.
|
||||
pts := make([]poindexter.KDPoint[Peer], 0, len(table))
|
||||
for i, p := range table {
|
||||
pts = append(pts, poindexter.KDPoint[Peer]{
|
||||
ID: fmt.Sprintf("peer-%d", i+1),
|
||||
Coords: []float64{float64(p.Ping)},
|
||||
Value: p,
|
||||
})
|
||||
}
|
||||
|
||||
// Build a KDTree. Euclidean metric is fine for 1D ping comparisons.
|
||||
kdt, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// 1) Find the best (lowest-ping) peer.
|
||||
// Query is a 1D point representing desired ping target. Using 0 finds the min.
|
||||
best, d, ok := kdt.Nearest([]float64{0})
|
||||
if !ok {
|
||||
fmt.Println("no peers found")
|
||||
return
|
||||
}
|
||||
fmt.Printf("Best peer: %s (ping=%d ms), distance=%.0f\n", best.Value.Addr, best.Value.Ping, d)
|
||||
// Example output: Best peer: peer4.example:4001 (ping=35 ms), distance=35
|
||||
|
||||
// 2) Top-N best peers by ping.
|
||||
top, dists := kdt.KNearest([]float64{0}, 3)
|
||||
fmt.Println("Top 3 peers by ping:")
|
||||
for i := range top {
|
||||
fmt.Printf(" #%d %s (ping=%d ms), distance=%.0f\n", i+1, top[i].Value.Addr, top[i].Value.Ping, dists[i])
|
||||
}
|
||||
|
||||
// 3) All peers under a threshold (e.g., <= 50 ms): radius search.
|
||||
within, wd := kdt.Radius([]float64{0}, 50)
|
||||
fmt.Println("Peers with ping <= 50 ms:")
|
||||
for i := range within {
|
||||
fmt.Printf(" %s (ping=%d ms), distance=%.0f\n", within[i].Value.Addr, within[i].Value.Ping, wd[i])
|
||||
}
|
||||
|
||||
// 4) Dynamic updates: if a peer improves ping, we can delete & re-insert with a new ID
|
||||
// (or keep the same ID and just update the point if your application tracks indices).
|
||||
// Here we simulate peer5 dropping from 60 ms to 30 ms.
|
||||
if kdt.DeleteByID("peer-5") {
|
||||
improved := poindexter.KDPoint[Peer]{
|
||||
ID: "peer-5", // keep the same ID for simplicity
|
||||
Coords: []float64{30},
|
||||
Value: Peer{Addr: "peer5.example:4001", Ping: 30},
|
||||
}
|
||||
_ = kdt.Insert(improved)
|
||||
}
|
||||
|
||||
// Recompute the best after update
|
||||
best2, d2, _ := kdt.Nearest([]float64{0})
|
||||
fmt.Printf("After update, best peer: %s (ping=%d ms), distance=%.0f\n", best2.Value.Addr, best2.Value.Ping, d2)
|
||||
}
|
||||
```
|
||||
|
||||
### Why does querying with `[0]` work?
|
||||
We use Euclidean distance in 1D, so `distance = |ping - target|`. With target `0`, each peer's distance is simply its ping (for example, peer4 at 35 ms has distance `|35 - 0| = 35`), so minimizing the distance is equivalent to minimizing the ping itself.
|
||||
|
||||
|
||||
### Extending the metric/space
|
||||
- Multi-objective: encode more routing features (lower is better) as extra dimensions, e.g. `[ping_ms, hops, queue_delay_ms]`.
|
||||
- Metric choice:
|
||||
- `EuclideanDistance` (L2): balances outliers smoothly.
|
||||
- `ManhattanDistance` (L1): linear penalty; robust for sparsity.
|
||||
- `ChebyshevDistance` (L∞): cares about the worst dimension.
|
||||
- Normalization: when mixing units (ms, hops, km), normalize or weight dimensions so the metric reflects your priority.
|
||||
|
||||
|
||||
### Notes
|
||||
- This KDTree currently uses an internal linear scan for queries. The API is stable and designed so it can be swapped to use `gonum.org/v1/gonum/spatial/kdtree` under the hood later for sub-linear queries on large datasets.
|
||||
- IDs are optional but recommended for O(1)-style deletes; keep them unique per tree.
|
||||
|
|
@ -1,127 +0,0 @@
|
|||
# Getting Started
|
||||
|
||||
This guide will help you get started with the Poindexter library.
|
||||
|
||||
## Installation
|
||||
|
||||
To install Poindexter, use `go get`:
|
||||
|
||||
```bash
|
||||
go get github.com/Snider/Poindexter
|
||||
```
|
||||
|
||||
## Basic Usage
|
||||
|
||||
### Importing the Library
|
||||
|
||||
```go
|
||||
import "github.com/Snider/Poindexter"
|
||||
```
|
||||
|
||||
### Using the Hello Function
|
||||
|
||||
The `Hello` function returns a greeting message:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Say hello to the world
|
||||
fmt.Println(poindexter.Hello(""))
|
||||
// Output: Hello, World!
|
||||
|
||||
// Say hello to someone specific
|
||||
fmt.Println(poindexter.Hello("Poindexter"))
|
||||
// Output: Hello, Poindexter!
|
||||
}
|
||||
```
|
||||
|
||||
### Getting the Version
|
||||
|
||||
You can check the library version:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
func main() {
|
||||
version := poindexter.Version()
|
||||
fmt.Println("Library version:", version)
|
||||
}
|
||||
```
|
||||
|
||||
## Sorting Data
|
||||
|
||||
Poindexter includes comprehensive sorting utilities:
|
||||
|
||||
### Basic Sorting
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Sort integers
|
||||
numbers := []int{3, 1, 4, 1, 5, 9}
|
||||
poindexter.SortInts(numbers)
|
||||
fmt.Println(numbers) // [1 1 3 4 5 9]
|
||||
|
||||
// Sort strings
|
||||
words := []string{"banana", "apple", "cherry"}
|
||||
poindexter.SortStrings(words)
|
||||
fmt.Println(words) // [apple banana cherry]
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Sorting with Custom Keys
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
type Product struct {
|
||||
Name string
|
||||
Price float64
|
||||
}
|
||||
|
||||
func main() {
|
||||
products := []Product{
|
||||
{"Apple", 1.50},
|
||||
{"Banana", 0.75},
|
||||
{"Cherry", 3.00},
|
||||
}
|
||||
|
||||
// Sort by price using SortByKey
|
||||
poindexter.SortByKey(products, func(p Product) float64 {
|
||||
return p.Price
|
||||
})
|
||||
|
||||
for _, p := range products {
|
||||
fmt.Printf("%s: $%.2f\n", p.Name, p.Price)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
- Check out the [API Reference](api.md) for detailed documentation
|
||||
- Try the example: [Find the best (lowest‑ping) DHT peer](dht-best-ping.md)
|
||||
- Explore multidimensional KDTree over ping/hops/geo/score: [Multidimensional KDTree (DHT)](kdtree-multidimensional.md)
|
||||
- Read about the [License](license.md)
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
# Poindexter
|
||||
|
||||
Welcome to the Poindexter Go library documentation!
|
||||
|
||||
## Overview
|
||||
|
||||
Poindexter is a Go library package licensed under EUPL-1.2.
|
||||
|
||||
## Features
|
||||
|
||||
- Simple and easy to use
|
||||
- Comprehensive sorting utilities with custom comparators
|
||||
- Generic sorting functions with type safety
|
||||
- Binary search capabilities
|
||||
- Well-documented API
|
||||
- Comprehensive test coverage
|
||||
- Cross-platform support
|
||||
|
||||
## Quick Start
|
||||
|
||||
Install the library:
|
||||
|
||||
```bash
|
||||
go get github.com/Snider/Poindexter
|
||||
```
|
||||
|
||||
Use it in your code:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
func main() {
|
||||
fmt.Println(poindexter.Hello("World"))
|
||||
fmt.Println("Version:", poindexter.Version())
|
||||
}
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the European Union Public Licence v1.2 (EUPL-1.2).
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome! Please feel free to submit a Pull Request.
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
- Find the best (lowest‑ping) DHT peer using KDTree: [Best Ping Peer (DHT)](dht-best-ping.md)
|
||||
- Multi-dimensional neighbor search over ping, hops, geo, and score: [Multi-Dimensional KDTree (DHT)](kdtree-multidimensional.md)
|
||||
|
||||
## Performance
|
||||
|
||||
- Benchmark methodology and guidance: [Performance](perf.md)
|
||||
|
|
@ -1,288 +0,0 @@
|
|||
# KDTree: Multi‑Dimensional Search (DHT peers)
|
||||
|
||||
This example extends the single‑dimension "best ping" demo to a realistic multi‑dimensional selection:
|
||||
|
||||
- ping_ms (lower is better)
|
||||
- hop_count (lower is better)
|
||||
- geo_distance_km (lower is better)
|
||||
- score (higher is better — e.g., capacity/reputation)
|
||||
|
||||
We will:
|
||||
- Build 4‑D points over these features
|
||||
- Run `Nearest`, `KNearest`, and `Radius` queries
|
||||
- Show subsets: ping+hop (2‑D) and ping+hop+geo (3‑D)
|
||||
- Demonstrate weighting/normalization to balance disparate units
|
||||
|
||||
> Tip: KDTree distances are geometric. Mixing units (ms, hops, km, arbitrary score) requires scaling so that each axis contributes proportionally to your decision policy.
|
||||
|
||||
## Dataset
|
||||
|
||||
We’ll use a small, made‑up set of DHT peers in each runnable example below. Each example declares its own `Peer` type and dataset so you can copy‑paste and run independently.
|
||||
|
||||
## Normalization and weights
|
||||
|
||||
To make heterogeneous units comparable (ms, hops, km, score), use the library helpers which:
|
||||
- Min‑max normalize each axis to [0,1] over your provided dataset
|
||||
- Optionally invert axes where “higher is better” so they become “lower cost”
|
||||
- Apply per‑axis weights so you can emphasize what matters
|
||||
|
||||
Build 4‑D points and query them with helpers (full program):
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
poindexter "github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
type Peer struct {
|
||||
ID string
|
||||
PingMS float64
|
||||
Hops float64
|
||||
GeoKM float64
|
||||
Score float64
|
||||
}
|
||||
|
||||
var peers = []Peer{
|
||||
{ID: "A", PingMS: 22, Hops: 3, GeoKM: 1200, Score: 0.86},
|
||||
{ID: "B", PingMS: 34, Hops: 2, GeoKM: 800, Score: 0.91},
|
||||
{ID: "C", PingMS: 15, Hops: 4, GeoKM: 4500, Score: 0.70},
|
||||
{ID: "D", PingMS: 55, Hops: 1, GeoKM: 300, Score: 0.95},
|
||||
{ID: "E", PingMS: 18, Hops: 2, GeoKM: 2200, Score: 0.80},
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Build 4‑D KDTree using Euclidean (L2)
|
||||
weights4 := [4]float64{1.0, 0.7, 0.2, 1.2}
|
||||
invert4 := [4]bool{false, false, false, true} // invert score (higher is better)
|
||||
pts, err := poindexter.Build4D(
|
||||
peers,
|
||||
func(p Peer) string { return p.ID },
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
func(p Peer) float64 { return p.GeoKM },
|
||||
func(p Peer) float64 { return p.Score },
|
||||
weights4, invert4,
|
||||
)
|
||||
if err != nil { panic(err) }
|
||||
tree, _ := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
|
||||
|
||||
// Query target preferences (construct a query in normalized/weighted space)
|
||||
// Example: seek very low ping, low hops, moderate geo, high score (low score_cost)
|
||||
query := []float64{weights4[0]*0.0, weights4[1]*0.2, weights4[2]*0.3, weights4[3]*0.0}
|
||||
|
||||
// 1‑NN
|
||||
best, dist, ok := tree.Nearest(query)
|
||||
if ok {
|
||||
fmt.Printf("Best peer: %s (dist=%.4f)\n", best.ID, dist)
|
||||
}
|
||||
|
||||
// k‑NN (top 3)
|
||||
neigh, dists := tree.KNearest(query, 3)
|
||||
for i := range neigh {
|
||||
fmt.Printf("%d) %s dist=%.4f\n", i+1, neigh[i].ID, dists[i])
|
||||
}
|
||||
|
||||
// Radius query
|
||||
within, wd := tree.Radius(query, 0.35)
|
||||
fmt.Printf("Within radius 0.35: ")
|
||||
for i := range within {
|
||||
fmt.Printf("%s(%.3f) ", within[i].ID, wd[i])
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
```
|
||||
|
||||
## 2‑D: Ping + Hop
|
||||
|
||||
Sometimes you want a strict trade‑off between just latency and path length. Build 2‑D points using helpers:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
poindexter "github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
type Peer struct {
|
||||
ID string
|
||||
PingMS float64
|
||||
Hops float64
|
||||
}
|
||||
|
||||
var peers = []Peer{
|
||||
{ID: "A", PingMS: 22, Hops: 3},
|
||||
{ID: "B", PingMS: 34, Hops: 2},
|
||||
{ID: "C", PingMS: 15, Hops: 4},
|
||||
{ID: "D", PingMS: 55, Hops: 1},
|
||||
{ID: "E", PingMS: 18, Hops: 2},
|
||||
}
|
||||
|
||||
func main() {
|
||||
weights2 := [2]float64{1.0, 1.0}
|
||||
invert2 := [2]bool{false, false}
|
||||
|
||||
pts2, err := poindexter.Build2D(
|
||||
peers,
|
||||
func(p Peer) string { return p.ID }, // id
|
||||
func(p Peer) float64 { return p.PingMS }, // f1: ping
|
||||
func(p Peer) float64 { return p.Hops }, // f2: hops
|
||||
weights2, invert2,
|
||||
)
|
||||
if err != nil { panic(err) }
|
||||
|
||||
tree2, _ := poindexter.NewKDTree(pts2, poindexter.WithMetric(poindexter.ManhattanDistance{})) // L1 favors axis‑aligned tradeoffs
|
||||
// Prefer very low ping, modest hops
|
||||
query2 := []float64{weights2[0]*0.0, weights2[1]*0.3}
|
||||
best2, _, _ := tree2.Nearest(query2)
|
||||
fmt.Println("2D best (ping+hop):", best2.ID)
|
||||
}
|
||||
```
|
||||
|
||||
## 3‑D: Ping + Hop + Geo
|
||||
|
||||
Add geography to discourage far peers when latency is similar. Use the 3‑D helper:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
poindexter "github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
type Peer struct {
|
||||
ID string
|
||||
PingMS float64
|
||||
Hops float64
|
||||
GeoKM float64
|
||||
}
|
||||
|
||||
var peers = []Peer{
|
||||
{ID: "A", PingMS: 22, Hops: 3, GeoKM: 1200},
|
||||
{ID: "B", PingMS: 34, Hops: 2, GeoKM: 800},
|
||||
{ID: "C", PingMS: 15, Hops: 4, GeoKM: 4500},
|
||||
{ID: "D", PingMS: 55, Hops: 1, GeoKM: 300},
|
||||
{ID: "E", PingMS: 18, Hops: 2, GeoKM: 2200},
|
||||
}
|
||||
|
||||
func main() {
|
||||
weights3 := [3]float64{1.0, 0.7, 0.3}
|
||||
invert3 := [3]bool{false, false, false}
|
||||
|
||||
pts3, err := poindexter.Build3D(
|
||||
peers,
|
||||
func(p Peer) string { return p.ID },
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
func(p Peer) float64 { return p.GeoKM },
|
||||
weights3, invert3,
|
||||
)
|
||||
if err != nil { panic(err) }
|
||||
|
||||
tree3, _ := poindexter.NewKDTree(pts3, poindexter.WithMetric(poindexter.EuclideanDistance{}))
|
||||
// Prefer low ping/hop, modest geo
|
||||
query3 := []float64{weights3[0]*0.0, weights3[1]*0.2, weights3[2]*0.4}
|
||||
top3, _, _ := tree3.Nearest(query3)
|
||||
fmt.Println("3D best (ping+hop+geo):", top3.ID)
|
||||
}
|
||||
```
|
||||
|
||||
## Dynamic updates
|
||||
|
||||
Your routing table changes constantly, so peers are inserted and removed over time. For consistent normalization, compute and reuse your min/max stats (preferred) or rebuild points when the candidate set changes.
|
||||
|
||||
Tip: Use the WithStats helpers to reuse normalization across updates:
|
||||
|
||||
```go
|
||||
// Compute once over your baseline
|
||||
stats := poindexter.ComputeNormStats2D(peers,
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
)
|
||||
|
||||
// Build now or later using the same stats
|
||||
ts, _ := poindexter.Build2DWithStats(
|
||||
peers,
|
||||
func(p Peer) string { return p.ID },
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
[2]float64{1,1}, [2]bool{false,false}, stats,
|
||||
)
|
||||
```
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
poindexter "github.com/Snider/Poindexter"
|
||||
)
|
||||
|
||||
type Peer struct {
|
||||
ID string
|
||||
PingMS float64
|
||||
Hops float64
|
||||
}
|
||||
|
||||
var peers = []Peer{
|
||||
{ID: "A", PingMS: 22, Hops: 3},
|
||||
{ID: "B", PingMS: 34, Hops: 2},
|
||||
{ID: "C", PingMS: 15, Hops: 4},
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Initial 2‑D build (ping + hops)
|
||||
weights2 := [2]float64{1.0, 1.0}
|
||||
invert2 := [2]bool{false, false}
|
||||
|
||||
// Compute normalization stats once over your baseline set
|
||||
stats := poindexter.ComputeNormStats2D(
|
||||
peers,
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
)
|
||||
|
||||
// Build using the precomputed stats so future inserts share the same scale
|
||||
pts, _ := poindexter.Build2DWithStats(
|
||||
peers,
|
||||
func(p Peer) string { return p.ID },
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
weights2, invert2, stats,
|
||||
)
|
||||
tree, _ := poindexter.NewKDTree(pts)
|
||||
|
||||
// Insert a new peer: reuse the same normalization stats to keep scale consistent
|
||||
newPeer := Peer{ID: "Z", PingMS: 12, Hops: 2}
|
||||
addPts, _ := poindexter.Build2DWithStats(
|
||||
[]Peer{newPeer},
|
||||
func(p Peer) string { return p.ID },
|
||||
func(p Peer) float64 { return p.PingMS },
|
||||
func(p Peer) float64 { return p.Hops },
|
||||
weights2, invert2, stats,
|
||||
)
|
||||
_ = tree.Insert(addPts[0])
|
||||
|
||||
// Verify nearest now prefers Z for low ping target
|
||||
best, _, _ := tree.Nearest([]float64{0, 0})
|
||||
fmt.Println("Best after insert:", best.ID)
|
||||
|
||||
// Delete by ID when peer goes offline
|
||||
_ = tree.DeleteByID("Z")
|
||||
}
|
||||
```
|
||||
|
||||
## Choosing a metric
|
||||
|
||||
- Euclidean (L2): smooth trade‑offs across axes; good default for blended preferences
|
||||
- Manhattan (L1): emphasizes per‑axis absolute differences; useful when each unit of ping/hop matters equally
|
||||
- Chebyshev (L∞): min‑max style; dominated by the worst axis (e.g., reject any peer with too many hops regardless of ping). A small numeric comparison of the three metrics follows below.
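For intuition, the same per-axis differences score differently under each metric. This is plain arithmetic (assumes `math` and `fmt` are imported), not library calls, and the numbers are made up:

```go
// Differences between a query and a candidate on two normalized axes.
dx, dy := 0.3, 0.1
l1 := math.Abs(dx) + math.Abs(dy)            // Manhattan: 0.4
l2 := math.Sqrt(dx*dx + dy*dy)               // Euclidean: ~0.316
linf := math.Max(math.Abs(dx), math.Abs(dy)) // Chebyshev: 0.3
fmt.Println(l1, l2, linf)
```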
|
||||
|
||||
## Notes on production use
|
||||
|
||||
- Keep and reuse normalization parameters (min/max or mean/std) rather than recomputing per query to avoid drift.
|
||||
- Consider capping outliers (e.g., clamp geo distances > 5000 km).
|
||||
- For large N (≥ 1e5) and low dims (≤ 8), consider swapping the internal engine to `gonum.org/v1/gonum/spatial/kdtree` behind the same API for faster queries.
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
# License
|
||||
|
||||
This project is licensed under the **European Union Public Licence v1.2 (EUPL-1.2)**.
|
||||
|
||||
## About EUPL-1.2
|
||||
|
||||
The European Union Public Licence (EUPL) is a copyleft free/open source software license created on the initiative of and approved by the European Commission.
|
||||
|
||||
## Key Points
|
||||
|
||||
- **Open Source**: The EUPL is an OSI-approved open source license
|
||||
- **Copyleft**: Derivative works must be distributed under the same or compatible license
|
||||
- **Multilingual**: The license is available in all official EU languages
|
||||
- **Compatible**: Compatible with other major open source licenses including GPL
|
||||
|
||||
## Full License Text
|
||||
|
||||
The complete license text can be found in the [LICENSE](https://github.com/Snider/Poindexter/blob/main/LICENSE) file in the repository.
|
||||
|
||||
## Using EUPL-1.2 Licensed Code
|
||||
|
||||
When using this library:
|
||||
|
||||
1. You must retain copyright and license notices
|
||||
2. You must state significant changes made to the code
|
||||
3. Derivative works must be licensed under EUPL-1.2 or a compatible license
|
||||
|
||||
## More Information
|
||||
|
||||
For more information about the EUPL, visit:
|
||||
- [Official EUPL Website](https://joinup.ec.europa.eu/collection/eupl)
|
||||
- [EUPL on Wikipedia](https://en.wikipedia.org/wiki/European_Union_Public_Licence)
|
||||
74
docs/perf.md
|
|
@ -1,74 +0,0 @@
|
|||
# Performance: KDTree benchmarks and guidance
|
||||
|
||||
This page summarizes how to measure KDTree performance in this repository and how to compare the two internal backends (Linear vs Gonum) that you can select at build/runtime.
|
||||
|
||||
## How benchmarks are organized
|
||||
|
||||
- Micro-benchmarks live in `bench_kdtree_test.go`, `bench_kdtree_dual_test.go`, and `bench_kdtree_dual_100k_test.go` and cover:
|
||||
- `Nearest` in 2D and 4D with N = 1k, 10k (both backends)
|
||||
- `Nearest` in 2D and 4D with N = 100k (gonum-tag job; linear also measured there)
|
||||
- `KNearest(k=10)` in 2D/4D with N = 1k, 10k
|
||||
- `Radius` (mid radius r≈0.5 after normalization) in 2D/4D with N = 1k, 10k
|
||||
- Datasets: Uniform and 3-cluster synthetic generators in normalized [0,1] spaces.
|
||||
- Backends: Linear (always available) and Gonum (enabled when built with `-tags=gonum`).
|
||||
|
||||
Run them locally:
|
||||
|
||||
```bash
|
||||
# Linear backend (default)
|
||||
go test -bench . -benchmem -run=^$ ./...
|
||||
|
||||
# Gonum backend (optimized KD; requires build tag)
|
||||
go test -tags=gonum -bench . -benchmem -run=^$ ./...
|
||||
```
|
||||
|
||||
GitHub Actions publishes benchmark artifacts on every push/PR:
|
||||
- Linear job: artifact `bench-linear.txt`
|
||||
- Gonum job: artifact `bench-gonum.txt`
|
||||
|
||||
## Backend selection and defaults
|
||||
|
||||
- Default backend is Linear.
|
||||
- If you build with `-tags=gonum`, the default switches to the optimized KD backend.
|
||||
- You can override at runtime:
|
||||
|
||||
```go
// Force Linear
lin, _ := poindexter.NewKDTree(pts, poindexter.WithBackend(poindexter.BackendLinear))
// Force Gonum (requires build tag)
gon, _ := poindexter.NewKDTree(pts, poindexter.WithBackend(poindexter.BackendGonum))
```
|
||||
|
||||
Supported metrics in the optimized backend: L2 (Euclidean), L1 (Manhattan), L∞ (Chebyshev). Cosine/Weighted-Cosine currently use the Linear backend.
|
||||
|
||||
## What to expect (rule of thumb)
|
||||
|
||||
- Linear backend: O(n) per query; fast for small-to-medium datasets (≤10k), especially in low dims (≤4).
|
||||
- Gonum backend: typically sub-linear for prunable datasets and dims ≤ ~8, with noticeable gains as N grows (≥10k–100k), especially on uniform or moderately clustered data and moderate radii.
|
||||
- For large radii (many points within r) or highly correlated/pathological data, pruning may be less effective and behavior approaches O(n) even with KD-trees.
|
||||
|
||||
## Interpreting results
|
||||
|
||||
Benchmarks output something like:
|
||||
|
||||
```
|
||||
BenchmarkNearest_10k_4D_Gonum_Uniform-8   50000   12300 ns/op   0 B/op   0 allocs/op
|
||||
```
|
||||
|
||||
- `ns/op`: lower is better (nanoseconds per operation)
|
||||
- `B/op` and `allocs/op`: memory behavior; fewer is better
|
||||
- `KNearest` incurs extra work due to sorting; `Radius` cost scales with the number of hits.
|
||||
|
||||
## Improving performance
|
||||
|
||||
- Normalize and weight features once; reuse across queries (see `Build*WithStats` helpers).
|
||||
- Choose a metric aligned with your policy: L2 usually a solid default; L1 for per-axis penalties; L∞ for hard-threshold dominated objectives.
|
||||
- Batch queries to benefit from CPU caches.
|
||||
- Prefer the Gonum backend for larger N and dims ≤ ~8; stick to Linear for tiny datasets or when using Cosine metrics.
|
||||
|
||||
## Reproducing and tracking performance
|
||||
|
||||
- Local (Linear): `go test -bench . -benchmem -run=^$ ./...`
|
||||
- Local (Gonum): `go test -tags=gonum -bench . -benchmem -run=^$ ./...`
|
||||
- CI artifacts: download `bench-linear.txt` and `bench-gonum.txt` from the latest workflow run.
|
||||
- Optional: add historical trend graphs via Benchstat or Codecov integration.
|
||||
|
|
@ -1,46 +0,0 @@
|
|||
# Poindexter Math Expansion
|
||||
|
||||
**Date:** 2026-02-16
|
||||
**Status:** Approved
|
||||
|
||||
## Context
|
||||
|
||||
Poindexter serves as the math pillar (alongside Borg=data, Enchantrix=encryption) in the Lethean ecosystem. It currently provides KD-Tree spatial queries, 5 distance metrics, sorting utilities, and normalization helpers.
|
||||
|
||||
Analysis of math operations scattered across core/go, core/go-ai, and core/mining revealed common patterns that Poindexter should centralize: descriptive statistics, scaling/interpolation, approximate equality, weighted scoring, and signal generation.
|
||||
|
||||
## New Modules
|
||||
|
||||
### stats.go — Descriptive statistics
|
||||
Sum, Mean, Variance, StdDev, MinMax, IsUnderrepresented.
|
||||
Consumers: ml/coverage.go, lab/handler/chart.go
|
||||
|
||||
### scale.go — Normalization and interpolation
|
||||
Lerp, InverseLerp, Remap, RoundToN, Clamp, MinMaxScale.
|
||||
Consumers: lab/handler/chart.go, i18n/numbers.go
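A rough sketch of the intended shapes (the signatures below are assumptions for illustration only; the ADR fixes the names, not the APIs; assumes `math` is imported):

```go
// Hypothetical shapes for the scale.go helpers listed above.
func Lerp(a, b, t float64) float64        { return a + (b-a)*t }
func InverseLerp(a, b, v float64) float64 { return (v - a) / (b - a) } // caller guards a == b
func Remap(v, inMin, inMax, outMin, outMax float64) float64 {
	return Lerp(outMin, outMax, InverseLerp(inMin, inMax, v))
}
func Clamp(v, lo, hi float64) float64 { return math.Min(math.Max(v, lo), hi) }
```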
|
||||
|
||||
### epsilon.go — Approximate equality
|
||||
ApproxEqual, ApproxZero.
|
||||
Consumers: ml/exact.go
|
||||
|
||||
### score.go — Weighted composite scoring
|
||||
Factor type, WeightedScore, Ratio, Delta, DeltaPercent.
|
||||
Consumers: ml/heuristic.go, ml/compare.go
|
||||
|
||||
### signal.go — Time-series primitives
|
||||
RampUp, SineWave, Oscillate, Noise (seeded RNG).
|
||||
Consumers: mining/simulated_miner.go
|
||||
|
||||
## Constraints
|
||||
|
||||
- Zero external dependencies (WASM-compilable)
|
||||
- Pure Go, stdlib only (math, math/rand)
|
||||
- Same package (`poindexter`), flat structure
|
||||
- Table-driven tests for every function
|
||||
- No changes to existing files
|
||||
|
||||
## Not In Scope
|
||||
|
||||
- MLX tensor ops (hardware-accelerated, stays in go-ai)
|
||||
- DNS tools migration to go-netops (separate PR)
|
||||
- gonum backend integration (future work)
|
||||
171
docs/wasm.md
|
|
@ -1,171 +0,0 @@
|
|||
# Browser/WebAssembly (WASM)
|
||||
|
||||
Poindexter ships a browser build compiled to WebAssembly along with a small JS loader and TypeScript types. This allows you to use the KD‑Tree functionality directly from web apps (Angular, React, Vue, plain ESM, etc.).
|
||||
|
||||
## What’s included
|
||||
|
||||
- `dist/poindexter.wasm` — the compiled Go WASM module
|
||||
- `dist/wasm_exec.js` — Go’s runtime shim required to run WASM in the browser
|
||||
- `npm/poindexter-wasm/loader.js` — ESM loader that instantiates the WASM and exposes a friendly API
|
||||
- `npm/poindexter-wasm/index.d.ts` — TypeScript typings for the loader and KD‑Tree API
|
||||
|
||||
## Quick start
|
||||
|
||||
- Build artifacts and copy `wasm_exec.js`:
|
||||
|
||||
```bash
|
||||
make wasm-build
|
||||
```
|
||||
|
||||
- Prepare the npm package folder with `dist/` and docs:
|
||||
|
||||
```bash
|
||||
make npm-pack
|
||||
```
|
||||
|
||||
- Minimal browser ESM usage (serve `dist/` statically):
|
||||
|
||||
```html
|
||||
<script type="module">
|
||||
import { init } from '/npm/poindexter-wasm/loader.js';
|
||||
const px = await init({
|
||||
wasmURL: '/dist/poindexter.wasm',
|
||||
wasmExecURL: '/dist/wasm_exec.js',
|
||||
});
|
||||
const tree = await px.newTree(2);
|
||||
await tree.insert({ id: 'a', coords: [0, 0], value: 'A' });
|
||||
const nn = await tree.nearest([0.1, 0.2]);
|
||||
console.log(nn);
|
||||
</script>
|
||||
```
|
||||
|
||||
## Building locally
|
||||
|
||||
```bash
|
||||
make wasm-build
|
||||
```
|
||||
|
||||
This produces `dist/poindexter.wasm` and copies `wasm_exec.js` into `dist/` from your Go installation. If your environment is non‑standard, you can override the path:
|
||||
|
||||
```bash
|
||||
WASM_EXEC=/custom/path/wasm_exec.js make wasm-build
|
||||
```
|
||||
|
||||
To assemble the npm package folder with the built artifacts:
|
||||
|
||||
```bash
|
||||
make npm-pack
|
||||
```
|
||||
|
||||
This populates `npm/poindexter-wasm/` with `dist/`, licence and readme files. You can then create a tarball for local testing:
|
||||
|
||||
```bash
|
||||
npm pack ./npm/poindexter-wasm
|
||||
```
|
||||
|
||||
## Using in Angular (example)
|
||||
|
||||
1) Install the package (use the tarball generated above or a published version):
|
||||
|
||||
```bash
|
||||
npm install <path-to>/snider-poindexter-wasm-0.0.0-development.tgz
|
||||
# or once published
|
||||
npm install @snider/poindexter-wasm
|
||||
```
|
||||
|
||||
2) Make the WASM runtime files available as app assets. In `angular.json` under `build.options.assets`:
|
||||
|
||||
```json
|
||||
{
|
||||
"glob": "**/*",
|
||||
"input": "node_modules/@snider/poindexter-wasm/dist",
|
||||
"output": "/assets/poindexter/"
|
||||
}
|
||||
```
|
||||
|
||||
3) Import and initialize in your code:
|
||||
|
||||
```ts
|
||||
import { init } from '@snider/poindexter-wasm';
|
||||
|
||||
const px = await init({
|
||||
// If you used the assets mapping above, these defaults should work:
|
||||
wasmURL: '/assets/poindexter/poindexter.wasm',
|
||||
wasmExecURL: '/assets/poindexter/wasm_exec.js',
|
||||
});
|
||||
|
||||
const tree = await px.newTree(2);
|
||||
await tree.insert({ id: 'a', coords: [0, 0], value: 'A' });
|
||||
const nearest = await tree.nearest([0.1, 0.2]);
|
||||
console.log(nearest);
|
||||
```
|
||||
|
||||
## JavaScript API
|
||||
|
||||
Top‑level functions returned by `init()`:
|
||||
|
||||
- `version(): string`
|
||||
- `hello(name?: string): string`
|
||||
- `newTree(dim: number): Promise<Tree>`
|
||||
|
||||
Tree methods:
|
||||
|
||||
- `dim(): Promise<number>`
|
||||
- `len(): Promise<number>`
|
||||
- `insert(p: { id: string; coords: number[]; value?: string }): Promise<void>`
|
||||
- `deleteByID(id: string): Promise<boolean>`
|
||||
- `nearest(query: number[]): Promise<{ id: string; coords: number[]; value: string; dist: number } | null>`
|
||||
- `kNearest(query: number[], k: number): Promise<Array<{ id: string; coords: number[]; value: string; dist: number }>>`
|
||||
- `radius(query: number[], r: number): Promise<Array<{ id: string; coords: number[]; value: string; dist: number }>>`
|
||||
- `exportJSON(): Promise<string>`
|
||||
|
||||
Notes:
|
||||
- The WASM bridge currently uses `KDTree[string]` for values to keep the boundary simple. You can encode richer payloads as JSON strings if needed.
|
||||
- `wasm_exec.js` must be available next to the `.wasm` file (the loader accepts explicit URLs if you place them elsewhere).
|
||||
|
||||
## CI artifacts
|
||||
|
||||
Our CI builds and uploads the following artifacts on each push/PR:
|
||||
|
||||
- `poindexter-wasm-dist` — the `dist/` folder containing `poindexter.wasm` and `wasm_exec.js`
|
||||
- `npm-poindexter-wasm` — the prepared npm package folder with `dist/` and documentation
|
||||
- `npm-poindexter-wasm-tarball` — a `.tgz` created via `npm pack` for quick local install/testing
|
||||
|
||||
You can download these artifacts from the workflow run summary in GitHub Actions.
|
||||
|
||||
## Browser demo (checked into repo)
|
||||
|
||||
There is a tiny browser demo you can load locally from this repo:
|
||||
|
||||
- Path: `examples/wasm-browser/index.html`
|
||||
- Prerequisites: run `make wasm-build` so `dist/poindexter.wasm` and `dist/wasm_exec.js` exist.
|
||||
- Serve the repo root (so relative paths resolve), for example:
|
||||
|
||||
```bash
|
||||
python3 -m http.server -b 127.0.0.1 8000
|
||||
```
|
||||
|
||||
Then open:
|
||||
|
||||
- http://127.0.0.1:8000/examples/wasm-browser/
|
||||
|
||||
Open the browser console to see outputs from `nearest`, `kNearest`, and `radius` queries.
|
||||
|
||||
### TypeScript + Vite demo (local-only)

A minimal TypeScript demo using Vite is also included:

- Path: `examples/wasm-browser-ts/`
- Prerequisites: run `make wasm-build` at the repo root first.
- From the example folder:

```bash
npm install
npm run dev
```

Then open the URL printed by Vite (usually http://127.0.0.1:5173/) and check the browser console.

Notes:

- The dev script copies `dist/poindexter.wasm`, `dist/wasm_exec.js`, and the ESM loader into the example's `public/` folder before serving.
- This example is intentionally excluded from CI to keep the pipeline lean.

14 epsilon.go

@ -1,14 +0,0 @@
package poindexter

import "math"

// ApproxEqual returns true if the absolute difference between a and b
// is less than epsilon.
func ApproxEqual(a, b, epsilon float64) bool {
    return math.Abs(a-b) < epsilon
}

// ApproxZero returns true if the absolute value of v is less than epsilon.
func ApproxZero(v, epsilon float64) bool {
    return math.Abs(v) < epsilon
}

@ -1,50 +0,0 @@
package poindexter

import "testing"

func TestApproxEqual(t *testing.T) {
    tests := []struct {
        name    string
        a, b    float64
        epsilon float64
        want    bool
    }{
        {"equal", 1.0, 1.0, 0.01, true},
        {"close", 1.0, 1.005, 0.01, true},
        {"not_close", 1.0, 1.02, 0.01, false},
        {"negative", -1.0, -1.005, 0.01, true},
        {"zero", 0, 0.0001, 0.001, true},
        {"at_boundary", 1.0, 1.01, 0.01, false},
        {"large_epsilon", 100, 200, 150, true},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            got := ApproxEqual(tt.a, tt.b, tt.epsilon)
            if got != tt.want {
                t.Errorf("ApproxEqual(%v, %v, %v) = %v, want %v", tt.a, tt.b, tt.epsilon, got, tt.want)
            }
        })
    }
}

func TestApproxZero(t *testing.T) {
    tests := []struct {
        name    string
        v       float64
        epsilon float64
        want    bool
    }{
        {"zero", 0, 0.01, true},
        {"small_pos", 0.005, 0.01, true},
        {"small_neg", -0.005, 0.01, true},
        {"not_zero", 0.02, 0.01, false},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            got := ApproxZero(tt.v, tt.epsilon)
            if got != tt.want {
                t.Errorf("ApproxZero(%v, %v) = %v, want %v", tt.v, tt.epsilon, got, tt.want)
            }
        })
    }
}

@ -1,69 +0,0 @@
package main

import (
    "fmt"
    po "github.com/Snider/Poindexter"
)

// BuildPingHop2D wraps poindexter.Build2D to construct 2D points from (ping_ms, hop_count).
func BuildPingHop2D[T any](
    items []T,
    id func(T) string,
    ping func(T) float64,
    hops func(T) float64,
    weights [2]float64,
    invert [2]bool,
) ([]po.KDPoint[T], error) {
    return po.Build2D(items, id, ping, hops, weights, invert)
}

// BuildPingHopGeo3D wraps poindexter.Build3D for (ping_ms, hop_count, geo_km).
func BuildPingHopGeo3D[T any](
    items []T,
    id func(T) string,
    ping func(T) float64,
    hops func(T) float64,
    geoKM func(T) float64,
    weights [3]float64,
    invert [3]bool,
) ([]po.KDPoint[T], error) {
    return po.Build3D(items, id, ping, hops, geoKM, weights, invert)
}

// BuildPingHopGeoScore4D wraps poindexter.Build4D for (ping_ms, hop_count, geo_km, score).
// Typical usage sets invert for score=true so higher score => lower cost.
func BuildPingHopGeoScore4D[T any](
    items []T,
    id func(T) string,
    ping func(T) float64,
    hops func(T) float64,
    geoKM func(T) float64,
    score func(T) float64,
    weights [4]float64,
    invert [4]bool,
) ([]po.KDPoint[T], error) {
    return po.Build4D(items, id, ping, hops, geoKM, score, weights, invert)
}

// Demo program that builds a small tree using the 2D helper and performs a query.
func main() {
    type Peer struct {
        ID           string
        PingMS, Hops float64
    }
    peers := []Peer{{"A", 20, 1}, {"B", 50, 2}, {"C", 10, 3}}

    pts, err := BuildPingHop2D(peers,
        func(p Peer) string { return p.ID },
        func(p Peer) float64 { return p.PingMS },
        func(p Peer) float64 { return p.Hops },
        [2]float64{1.0, 0.7},
        [2]bool{false, false},
    )
    if err != nil {
        panic(err)
    }
    kdt, _ := po.NewKDTree(pts, po.WithMetric(po.EuclideanDistance{}))
    best, dist, _ := kdt.Nearest([]float64{0, 0})
    fmt.Println(best.ID, dist)
}

@ -1,8 +0,0 @@
package main

import "testing"

func TestMain_Run(t *testing.T) {
    // Just ensure the example main runs without panic to contribute to coverage
    main()
}

@ -1,49 +0,0 @@
package main

import (
    "fmt"
    poindexter "github.com/Snider/Poindexter"
    "testing"
)

type peer struct {
    Addr string
    Ping int
}

// TestExample1D ensures the 1D example logic runs and exercises KDTree paths.
func TestExample1D(t *testing.T) {
    // Same toy table as the example
    table := []peer{
        {Addr: "peer1.example:4001", Ping: 74},
        {Addr: "peer2.example:4001", Ping: 52},
        {Addr: "peer3.example:4001", Ping: 110},
        {Addr: "peer4.example:4001", Ping: 35},
        {Addr: "peer5.example:4001", Ping: 60},
        {Addr: "peer6.example:4001", Ping: 44},
    }
    pts := make([]poindexter.KDPoint[peer], 0, len(table))
    for i, p := range table {
        pts = append(pts, poindexter.KDPoint[peer]{
            ID:     fmt.Sprintf("peer-%d", i+1),
            Coords: []float64{float64(p.Ping)},
            Value:  p,
        })
    }
    kdt, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
    if err != nil {
        t.Fatalf("NewKDTree err: %v", err)
    }
    best, d, ok := kdt.Nearest([]float64{0})
    if !ok {
        t.Fatalf("no nearest")
    }
    // Expect the minimum ping (35ms)
    if best.Value.Ping != 35 {
        t.Fatalf("expected best ping 35ms, got %d", best.Value.Ping)
    }
    // Distance from [0] to [35] should be 35
    if d != 35 {
        t.Fatalf("expected distance 35, got %v", d)
    }
}

@ -1,41 +0,0 @@
package main

import (
    "fmt"
    poindexter "github.com/Snider/Poindexter"
)

type Peer struct {
    Addr string
    Ping int
}

func main() {
    // Toy DHT routing table
    table := []Peer{
        {Addr: "peer1.example:4001", Ping: 74},
        {Addr: "peer2.example:4001", Ping: 52},
        {Addr: "peer3.example:4001", Ping: 110},
        {Addr: "peer4.example:4001", Ping: 35},
        {Addr: "peer5.example:4001", Ping: 60},
        {Addr: "peer6.example:4001", Ping: 44},
    }
    pts := make([]poindexter.KDPoint[Peer], 0, len(table))
    for i, p := range table {
        pts = append(pts, poindexter.KDPoint[Peer]{
            ID:     fmt.Sprintf("peer-%d", i+1),
            Coords: []float64{float64(p.Ping)},
            Value:  p,
        })
    }
    kdt, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
    if err != nil {
        panic(err)
    }
    best, d, ok := kdt.Nearest([]float64{0})
    if !ok {
        fmt.Println("no peers found")
        return
    }
    fmt.Printf("Best peer: %s (ping=%d ms), distance=%.0f\n", best.Value.Addr, best.Value.Ping, d)
}

@ -1,9 +0,0 @@
package main

import "testing"

// TestExampleMain runs the example's main function to ensure it executes without panic.
// This also allows the example code paths to be included in coverage reports.
func TestExampleMain(t *testing.T) {
    main()
}

@ -1,49 +0,0 @@
package main

import (
    "testing"

    poindexter "github.com/Snider/Poindexter"
)

type peer2 struct {
    ID     string
    PingMS float64
    Hops   float64
}

func TestExample2D(t *testing.T) {
    peers := []peer2{
        {ID: "A", PingMS: 22, Hops: 3},
        {ID: "B", PingMS: 34, Hops: 2},
        {ID: "C", PingMS: 15, Hops: 4},
        {ID: "D", PingMS: 55, Hops: 1},
        {ID: "E", PingMS: 18, Hops: 2},
    }
    weights := [2]float64{1.0, 1.0}
    invert := [2]bool{false, false}
    pts, err := poindexter.Build2D(
        peers,
        func(p peer2) string { return p.ID },
        func(p peer2) float64 { return p.PingMS },
        func(p peer2) float64 { return p.Hops },
        weights, invert,
    )
    if err != nil {
        t.Fatalf("Build2D err: %v", err)
    }
    tr, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.ManhattanDistance{}))
    if err != nil {
        t.Fatalf("NewKDTree err: %v", err)
    }
    best, d, ok := tr.Nearest([]float64{0, 0.3})
    if !ok {
        t.Fatalf("no nearest")
    }
    if best.ID == "" {
        t.Fatalf("unexpected empty ID")
    }
    if d < 0 {
        t.Fatalf("negative distance: %v", d)
    }
}

@ -1,43 +0,0 @@
package main

import (
    "fmt"
    poindexter "github.com/Snider/Poindexter"
)

type Peer2 struct {
    ID     string
    PingMS float64
    Hops   float64
}

func main() {
    peers := []Peer2{
        {ID: "A", PingMS: 22, Hops: 3},
        {ID: "B", PingMS: 34, Hops: 2},
        {ID: "C", PingMS: 15, Hops: 4},
        {ID: "D", PingMS: 55, Hops: 1},
        {ID: "E", PingMS: 18, Hops: 2},
    }
    weights := [2]float64{1.0, 1.0}
    invert := [2]bool{false, false}
    pts, err := poindexter.Build2D(
        peers,
        func(p Peer2) string { return p.ID },
        func(p Peer2) float64 { return p.PingMS },
        func(p Peer2) float64 { return p.Hops },
        weights, invert,
    )
    if err != nil {
        panic(fmt.Sprintf("Build2D failed: %v", err))
    }
    tr, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.ManhattanDistance{}))
    if err != nil {
        panic(fmt.Sprintf("NewKDTree failed: %v", err))
    }
    best, _, ok := tr.Nearest([]float64{0, 0.3})
    if !ok {
        panic("no nearest neighbour found")
    }
    fmt.Println("2D best:", best.ID)
}

@ -1,7 +0,0 @@
package main

import "testing"

func TestExample2D_Main(t *testing.T) {
    main()
}

@ -1,50 +0,0 @@
package main

import (
    poindexter "github.com/Snider/Poindexter"
    "testing"
)

type peer3test struct {
    ID     string
    PingMS float64
    Hops   float64
    GeoKM  float64
}

func TestExample3D(t *testing.T) {
    peers := []peer3test{
        {ID: "A", PingMS: 22, Hops: 3, GeoKM: 1200},
        {ID: "B", PingMS: 34, Hops: 2, GeoKM: 800},
        {ID: "C", PingMS: 15, Hops: 4, GeoKM: 4500},
        {ID: "D", PingMS: 55, Hops: 1, GeoKM: 300},
        {ID: "E", PingMS: 18, Hops: 2, GeoKM: 2200},
    }
    weights := [3]float64{1.0, 0.7, 0.3}
    invert := [3]bool{false, false, false}
    pts, err := poindexter.Build3D(
        peers,
        func(p peer3test) string { return p.ID },
        func(p peer3test) float64 { return p.PingMS },
        func(p peer3test) float64 { return p.Hops },
        func(p peer3test) float64 { return p.GeoKM },
        weights, invert,
    )
    if err != nil {
        t.Fatalf("Build3D err: %v", err)
    }
    tr, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
    if err != nil {
        t.Fatalf("NewKDTree err: %v", err)
    }
    best, d, ok := tr.Nearest([]float64{0, weights[1] * 0.2, weights[2] * 0.4})
    if !ok {
        t.Fatalf("no nearest")
    }
    if best.ID == "" {
        t.Fatalf("unexpected empty ID")
    }
    if d < 0 {
        t.Fatalf("negative distance: %v", d)
    }
}

@ -1,36 +0,0 @@
package main

import (
    "fmt"
    poindexter "github.com/Snider/Poindexter"
)

type Peer3 struct {
    ID     string
    PingMS float64
    Hops   float64
    GeoKM  float64
}

func main() {
    peers := []Peer3{
        {ID: "A", PingMS: 22, Hops: 3, GeoKM: 1200},
        {ID: "B", PingMS: 34, Hops: 2, GeoKM: 800},
        {ID: "C", PingMS: 15, Hops: 4, GeoKM: 4500},
        {ID: "D", PingMS: 55, Hops: 1, GeoKM: 300},
        {ID: "E", PingMS: 18, Hops: 2, GeoKM: 2200},
    }
    weights := [3]float64{1.0, 0.7, 0.3}
    invert := [3]bool{false, false, false}
    pts, _ := poindexter.Build3D(
        peers,
        func(p Peer3) string { return p.ID },
        func(p Peer3) float64 { return p.PingMS },
        func(p Peer3) float64 { return p.Hops },
        func(p Peer3) float64 { return p.GeoKM },
        weights, invert,
    )
    tr, _ := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
    best, _, _ := tr.Nearest([]float64{0, weights[1] * 0.2, weights[2] * 0.4})
    fmt.Println("3D best:", best.ID)
}

@ -1,7 +0,0 @@
package main

import "testing"

func TestExample3D_Main(t *testing.T) {
    main()
}

@ -1,52 +0,0 @@
package main

import (
    poindexter "github.com/Snider/Poindexter"
    "testing"
)

type peer4test struct {
    ID     string
    PingMS float64
    Hops   float64
    GeoKM  float64
    Score  float64
}

func TestExample4D(t *testing.T) {
    peers := []peer4test{
        {ID: "A", PingMS: 22, Hops: 3, GeoKM: 1200, Score: 0.86},
        {ID: "B", PingMS: 34, Hops: 2, GeoKM: 800, Score: 0.91},
        {ID: "C", PingMS: 15, Hops: 4, GeoKM: 4500, Score: 0.70},
        {ID: "D", PingMS: 55, Hops: 1, GeoKM: 300, Score: 0.95},
        {ID: "E", PingMS: 18, Hops: 2, GeoKM: 2200, Score: 0.80},
    }
    weights := [4]float64{1.0, 0.7, 0.2, 1.2}
    invert := [4]bool{false, false, false, true}
    pts, err := poindexter.Build4D(
        peers,
        func(p peer4test) string { return p.ID },
        func(p peer4test) float64 { return p.PingMS },
        func(p peer4test) float64 { return p.Hops },
        func(p peer4test) float64 { return p.GeoKM },
        func(p peer4test) float64 { return p.Score },
        weights, invert,
    )
    if err != nil {
        t.Fatalf("Build4D err: %v", err)
    }
    tr, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
    if err != nil {
        t.Fatalf("NewKDTree err: %v", err)
    }
    best, d, ok := tr.Nearest([]float64{0, weights[1] * 0.2, weights[2] * 0.3, 0})
    if !ok {
        t.Fatalf("no nearest")
    }
    if best.ID == "" {
        t.Fatalf("unexpected empty ID")
    }
    if d < 0 {
        t.Fatalf("negative distance: %v", d)
    }
}

@ -1,47 +0,0 @@
package main

import (
    "fmt"
    poindexter "github.com/Snider/Poindexter"
)

type Peer4 struct {
    ID     string
    PingMS float64
    Hops   float64
    GeoKM  float64
    Score  float64
}

func main() {
    peers := []Peer4{
        {ID: "A", PingMS: 22, Hops: 3, GeoKM: 1200, Score: 0.86},
        {ID: "B", PingMS: 34, Hops: 2, GeoKM: 800, Score: 0.91},
        {ID: "C", PingMS: 15, Hops: 4, GeoKM: 4500, Score: 0.70},
        {ID: "D", PingMS: 55, Hops: 1, GeoKM: 300, Score: 0.95},
        {ID: "E", PingMS: 18, Hops: 2, GeoKM: 2200, Score: 0.80},
    }
    weights := [4]float64{1.0, 0.7, 0.2, 1.2}
    invert := [4]bool{false, false, false, true}
    pts, err := poindexter.Build4D(
        peers,
        func(p Peer4) string { return p.ID },
        func(p Peer4) float64 { return p.PingMS },
        func(p Peer4) float64 { return p.Hops },
        func(p Peer4) float64 { return p.GeoKM },
        func(p Peer4) float64 { return p.Score },
        weights, invert,
    )
    if err != nil {
        panic(err)
    }
    tr, err := poindexter.NewKDTree(pts, poindexter.WithMetric(poindexter.EuclideanDistance{}))
    if err != nil {
        panic(err)
    }
    best, _, ok := tr.Nearest([]float64{0, weights[1] * 0.2, weights[2] * 0.3, 0})
    if !ok {
        panic("no nearest neighbour found")
    }
    fmt.Println("4D best:", best.ID)
}

@ -1,7 +0,0 @@
package main

import "testing"

func TestExample4D_Main(t *testing.T) {
    main()
}

@ -1,46 +0,0 @@
# WASM Browser Example (TypeScript + Vite)

This is a minimal TypeScript example that runs Poindexter’s WebAssembly build in the browser.
It bundles a tiny page with Vite and demonstrates creating a KDTree and running `Nearest`,
`KNearest`, and `Radius` queries.

## Prerequisites

- Go toolchain installed
- Node.js 18+ (tested with Node 20)

## Quick start

1) Build the WASM artifacts at the repo root:

```bash
make wasm-build
```

This creates `dist/poindexter.wasm` and `dist/wasm_exec.js`.

2) From this example directory, install deps and start the dev server (the script copies the required files into `public/` before starting Vite):

```bash
npm install
npm run dev
```

3) Open the URL printed by Vite (usually http://127.0.0.1:5173/). Open the browser console to see outputs.

## What the dev script does

- Copies `../../dist/poindexter.wasm` and `../../dist/wasm_exec.js` into `public/`
- Copies `../../npm/poindexter-wasm/loader.js` into `public/`
- Starts Vite with `public/` as the static root for those assets

The TypeScript code imports the loader from `/loader.js` and initializes with:

```ts
const px = await init({
  wasmURL: '/poindexter.wasm',
  wasmExecURL: '/wasm_exec.js',
});
```

## Notes

- This example is local-only and not built in CI to keep jobs light.
- You can adapt the same structure inside your own web projects; alternatively, install the published npm package when available and serve `dist/` as static assets.

@ -1,25 +0,0 @@
|
|||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>Poindexter WASM TS Demo</title>
|
||||
<style>
|
||||
body { font-family: system-ui, -apple-system, Segoe UI, Roboto, sans-serif; margin: 2rem; }
|
||||
pre { background: #f6f8fa; padding: 1rem; overflow-x: auto; }
|
||||
code { font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Poindexter WASM (TypeScript + Vite)</h1>
|
||||
<p>
|
||||
This demo initializes the WebAssembly build and performs KDTree queries. Open your browser console to see results.
|
||||
</p>
|
||||
<p>
|
||||
Before running, build the WASM artifacts at the repo root:
|
||||
</p>
|
||||
<pre><code>make wasm-build</code></pre>
|
||||
|
||||
<script type="module" src="/src/main.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,16 +0,0 @@
{
  "name": "poindexter-wasm-browser-ts",
  "private": true,
  "version": "0.0.0",
  "type": "module",
  "scripts": {
    "predev": "node scripts/copy-assets.mjs",
    "dev": "vite",
    "build": "vite build",
    "preview": "vite preview"
  },
  "devDependencies": {
    "typescript": "^5.4.0",
    "vite": "^5.0.0"
  }
}

@ -1,40 +0,0 @@
// Copies WASM artifacts and loader into the public/ folder before Vite dev/build.
// Run as an npm script (predev) from this example directory.
import { cp, mkdir } from 'node:fs/promises';
import { dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

async function main() {
  const root = resolve(__dirname, '../../..');
  const exampleDir = resolve(__dirname, '..');
  const publicDir = resolve(exampleDir, 'public');

  await mkdir(publicDir, { recursive: true });

  const sources = [
    // WASM artifacts built by `make wasm-build`
    resolve(root, 'dist/poindexter.wasm'),
    resolve(root, 'dist/wasm_exec.js'),
    // ESM loader shipped with the repo's npm folder
    resolve(root, 'npm/poindexter-wasm/loader.js'),
  ];

  const targets = [
    resolve(publicDir, 'poindexter.wasm'),
    resolve(publicDir, 'wasm_exec.js'),
    resolve(publicDir, 'loader.js'),
  ];

  for (let i = 0; i < sources.length; i++) {
    await cp(sources[i], targets[i]);
    console.log(`Copied ${sources[i]} -> ${targets[i]}`);
  }
}

main().catch((err) => {
  console.error('copy-assets failed:', err);
  process.exit(1);
});

@ -1,173 +0,0 @@
// Minimal TypeScript demo that uses the Poindexter WASM ESM loader.
// Precondition: run `make wasm-build` at repo root, then `npm run dev` in this folder.

// We copy the loader and wasm artifacts to /public via scripts/copy-assets.mjs before dev starts.
// @ts-ignore
import { init } from '/loader.js';

async function run() {
  const px = await init({
    wasmURL: '/poindexter.wasm',
    wasmExecURL: '/wasm_exec.js',
  });

  console.log('Poindexter (WASM) version:', await px.version());

  // =========================================================================
  // Basic KD-Tree operations
  // =========================================================================
  const tree = await px.newTree(2);
  await tree.insert({ id: 'peer-a', coords: [0, 0], value: 'Peer A' });
  await tree.insert({ id: 'peer-b', coords: [1, 0], value: 'Peer B' });
  await tree.insert({ id: 'peer-c', coords: [0, 1], value: 'Peer C' });
  await tree.insert({ id: 'peer-d', coords: [0.5, 0.5], value: 'Peer D' });

  console.log('\n=== Basic Queries ===');
  const nn = await tree.nearest([0.9, 0.1]);
  console.log('Nearest [0.9,0.1]:', nn);

  const kn = await tree.kNearest([0.5, 0.5], 3);
  console.log('kNN k=3 [0.5,0.5]:', kn);

  const rad = await tree.radius([0, 0], 1.1);
  console.log('Radius r=1.1 [0,0]:', rad);

  // =========================================================================
  // Analytics Demo
  // =========================================================================
  console.log('\n=== Tree Analytics ===');

  // Perform more queries to generate analytics
  for (let i = 0; i < 10; i++) {
    await tree.nearest([Math.random(), Math.random()]);
  }
  await tree.kNearest([0.2, 0.8], 2);
  await tree.kNearest([0.7, 0.3], 2);

  // Get tree-level analytics
  const analytics = await tree.getAnalytics();
  console.log('Tree Analytics:', {
    queryCount: analytics.queryCount,
    insertCount: analytics.insertCount,
    avgQueryTimeNs: analytics.avgQueryTimeNs,
    minQueryTimeNs: analytics.minQueryTimeNs,
    maxQueryTimeNs: analytics.maxQueryTimeNs,
  });

  // =========================================================================
  // Peer Selection Analytics
  // =========================================================================
  console.log('\n=== Peer Selection Analytics ===');

  // Get all peer stats
  const peerStats = await tree.getPeerStats();
  console.log('All Peer Stats:', peerStats);

  // Get top 3 most frequently selected peers
  const topPeers = await tree.getTopPeers(3);
  console.log('Top 3 Peers:', topPeers);

  // =========================================================================
  // Axis Distribution Analysis
  // =========================================================================
  console.log('\n=== Axis Distributions ===');

  const axisDists = await tree.getAxisDistributions(['latency', 'hops']);
  console.log('Axis Distributions:', axisDists);

  // =========================================================================
  // NAT Routing / Peer Quality Scoring
  // =========================================================================
  console.log('\n=== NAT Routing & Peer Quality ===');

  // Simulate peer network metrics
  const peerMetrics = {
    connectivityScore: 0.9,
    symmetryScore: 0.8,
    relayProbability: 0.1,
    directSuccessRate: 0.95,
    avgRttMs: 50,
    jitterMs: 10,
    packetLossRate: 0.01,
    bandwidthMbps: 100,
    natType: 'full_cone' as const,
  };

  const qualityScore = await px.computePeerQualityScore(peerMetrics);
  console.log('Peer Quality Score (0-1):', qualityScore.toFixed(3));

  // Get default quality weights
  const defaultWeights = await px.getDefaultQualityWeights();
  console.log('Default Quality Weights:', defaultWeights);

  // =========================================================================
  // Trust Score Calculation
  // =========================================================================
  console.log('\n=== Trust Score ===');

  const trustMetrics = {
    reputationScore: 0.8,
    successfulTransactions: 150,
    failedTransactions: 3,
    ageSeconds: 86400 * 30, // 30 days
    vouchCount: 5,
    flagCount: 0,
    proofOfWork: 0.5,
  };

  const trustScore = await px.computeTrustScore(trustMetrics);
  console.log('Trust Score (0-1):', trustScore.toFixed(3));

  // =========================================================================
  // Distribution Statistics
  // =========================================================================
  console.log('\n=== Distribution Statistics ===');

  // Simulate some distance measurements
  const distances = [0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.5, 0.8, 1.2];
  const distStats = await px.computeDistributionStats(distances);
  console.log('Distance Distribution Stats:', {
    count: distStats.count,
    min: distStats.min.toFixed(3),
    max: distStats.max.toFixed(3),
    mean: distStats.mean.toFixed(3),
    median: distStats.median.toFixed(3),
    stdDev: distStats.stdDev.toFixed(3),
    p90: distStats.p90.toFixed(3),
  });

  // =========================================================================
  // Feature Normalization for KD-Tree
  // =========================================================================
  console.log('\n=== Feature Normalization ===');

  // Raw peer features: [latency_ms, hops, geo_km, trust_inv, bw_inv, loss, conn_inv, nat_inv]
  const rawFeatures = [100, 5, 500, 0.1, 50, 0.02, 5, 0.1];

  // Get default feature ranges
  const featureRanges = await px.getDefaultPeerFeatureRanges();
  console.log('Feature Labels:', featureRanges.labels);

  // Normalize features
  const normalizedFeatures = await px.normalizePeerFeatures(rawFeatures);
  console.log('Normalized Features:', normalizedFeatures.map((f: number) => f.toFixed(3)));

  // Apply custom weights
  const customWeights = [1.5, 1.0, 0.5, 1.2, 0.8, 2.0, 1.0, 0.7];
  const weightedFeatures = await px.weightedPeerFeatures(normalizedFeatures, customWeights);
  console.log('Weighted Features:', weightedFeatures.map((f: number) => f.toFixed(3)));

  // =========================================================================
  // Analytics Reset
  // =========================================================================
  console.log('\n=== Analytics Reset ===');
  await tree.resetAnalytics();
  const resetAnalytics = await tree.getAnalytics();
  console.log('After Reset - Query Count:', resetAnalytics.queryCount);

  console.log('\n=== Demo Complete ===');
}

run().catch((err) => {
  console.error('WASM demo error:', err);
});

@ -1,15 +0,0 @@
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "ESNext",
    "moduleResolution": "Bundler",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "jsx": "react-jsx",
    "allowJs": false,
    "types": []
  },
  "include": ["src/**/*"]
}

Some files were not shown because too many files have changed in this diff.