feature/1114 implement account interface (#1167)

- **refactor: move session-related code to middleware package**
- **refactor: update PKL build process and adjust related
configurations**
- **feat: integrate base.cosmos.v1 Genesis module**
- **refactor: pass session context to modal rendering functions**
- **refactor: move nebula package to app directory and update templ
version**
- **refactor: Move home section video view to dedicated directory**
- **refactor: remove unused views file**
- **refactor: move styles and UI components to global scope**
- **refactor: Rename images.go to cdn.go**
- **feat: Add Empty State Illustrations**
- **refactor: Consolidate Vault Index Logic**
- **fix: References to App.wasm and remove Vault Directory embedded CDN
files**
- **refactor: Move CDN types to Models**
- **fix: Correct line numbers in templ error messages for
arch_templ.go**
- **refactor: use common types for peer roles**
- **refactor: move common types and ORM to a shared package**
- **fix: Config import dwn**
- **refactor: move nebula directory to app**
- **feat: Rebuild nebula**
- **fix: correct file paths in panels templates**
- **feat: Remove duplicate types**
- **refactor: Move dwn to pkg/core**
- **refactor: Binary Structure**
- **feat: Introduce Crypto Pkg**
- **fix: Broken Process Start**
- **feat: Update pkg/* structure**
- **feat: Refactor PKL Structure**
- **build: update pkl build process**
- **chore: Remove Empty Files**
- **refactor: remove unused macaroon package**
- **feat: Add WebAwesome Components**
- **refactor: consolidate build and generation tasks into a single
taskfile, remove redundant makefile targets**
- **refactor: refactor server and move components to pkg/core/dwn**
- **build: update go modules**
- **refactor: move gateway logic into dedicated hway command**
- **feat: Add KSS (Krawczyk-Song-Song) MPC cryptography module**
- **feat: Implement MPC-based JWT signing and UCAN token generation**
- **feat: add support for MPC-based JWT signing**
- **feat: Implement MPC-based UCAN capabilities for smart accounts**
- **feat: add address field to keyshareSource**
- **feat: Add comprehensive MPC test suite for keyshares, UCAN tokens,
and token attenuations**
- **refactor: improve MPC keyshare management and signing process**
- **feat: enhance MPC capability hierarchy documentation**
- **refactor: rename GenerateKeyshares function to NewKeyshareSource for
clarity**
- **refactor: remove unused Ethereum address computation**
- **feat: Add HasHandle and IsAuthenticated methods to HTTPContext**
- **refactor: Add context.Context support to session HTTPContext**
- **refactor: Resolve context interface conflicts in HTTPContext**
- **feat: Add session ID context key and helper functions**
- **feat: Update WebApp Page Rendering**
- **refactor: Simplify context management by using single HTTPContext
key**
- **refactor: Simplify HTTPContext creation and context management in
session middleware**
- **refactor: refactor session middleware to use a single data
structure**
- **refactor: Simplify HTTPContext implementation and session data
handling**
- **refactor: Improve session context handling and prevent nil pointer
errors**
- **refactor: Improve session context handling with nil safety and type
support**
- **refactor: improve session data injection**
- **feat: add full-screen modal component and update registration flow**
- **chore: add .air.toml to .gitignore**
- **feat: add Air to devbox and update dependencies**
This commit is contained in:
Prad Nukala 2024-11-23 01:28:58 -05:00 committed by GitHub
parent bf94277b0f
commit 89989fa102
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
549 changed files with 74162 additions and 9856 deletions

51
.air.toml Normal file
View File

@ -0,0 +1,51 @@
# Air live-reload configuration for local development of the hway server.
root = "."
testdata_dir = "testdata"
tmp_dir = "tmp"
# Build settings: how the binary is produced and which files trigger rebuilds.
[build]
args_bin = []
bin = "./tmp/main"
cmd = "go build -o ./tmp/main ./cmd/hway/main.go"
# Debounce (ms) after a change before triggering a rebuild.
delay = 1000
exclude_dir = ["assets", "tmp", "vendor", "testdata", "pkg/webapp/node_modules"]
exclude_file = []
# Generated files (tests, templ output) must not retrigger builds.
exclude_regex = ["_test.go", "_templ.go"]
exclude_unchanged = true
follow_symlink = false
full_bin = ""
include_dir = ["pkg/webapp"]
include_ext = ["go", "tpl", "templ", "html"]
include_file = []
kill_delay = "0s"
log = "build-errors.log"
# Use filesystem events rather than polling.
poll = false
poll_interval = 0
post_cmd = []
# Regenerate templ components before every build.
pre_cmd = ["templ generate"]
rerun = false
rerun_delay = 500
send_interrupt = false
stop_on_error = false
# Terminal colors for each log stream.
[color]
app = ""
build = "yellow"
main = "magenta"
runner = "green"
watcher = "cyan"
[log]
main_only = false
time = false
[misc]
clean_on_exit = false
# Built-in browser-reload proxy (currently disabled).
[proxy]
app_port = 3000
enabled = false
proxy_port = 8787
[screen]
clear_on_rebuild = true
keep_scroll = true

6
.gitignore vendored
View File

@ -15,9 +15,13 @@ dist
*.test *.test
.devon* .devon*
**/.DS_Store **/.DS_Store
.task
.wrangler
# Output of the go coverage tool # Output of the go coverage tool
*.out *.out
.air.toml
tmp
# Exclude embedded files # Exclude embedded files
!internal/files/dist !internal/files/dist

View File

@ -94,7 +94,6 @@ endif
install: go.sum install: go.sum
go install -mod=readonly $(BUILD_FLAGS) ./cmd/sonrd go install -mod=readonly $(BUILD_FLAGS) ./cmd/sonrd
go install -mod=readonly $(BUILD_FLAGS) ./cmd/hway
######################################## ########################################
### Tools & dependencies ### Tools & dependencies
@ -289,22 +288,6 @@ sh-testnet: mod-tidy
.PHONY: setup-testnet set-testnet-configs testnet testnet-basic sh-testnet .PHONY: setup-testnet set-testnet-configs testnet testnet-basic sh-testnet
###############################################################################
### custom generation ###
###############################################################################
.PHONY: templ-gen pkl-gen
templ-gen:
@echo "(templ) Generating templ files"
templ generate
pkl-gen:
@echo "(pkl) Building PKL"
go run github.com/apple/pkl-go/cmd/pkl-gen-go ./third_party/pkl/src/sonr.configs.v1/DWN.pkl
go run github.com/apple/pkl-go/cmd/pkl-gen-go ./third_party/pkl/src/sonr.models.v1/ORM.pkl
############################################################################### ###############################################################################
### help ### ### help ###
############################################################################### ###############################################################################

View File

@ -1,86 +1,86 @@
version: "3" version: "3"
vars: vars:
ROOT_DIR: ROOT_DIR:
sh: pwd sh: git rev-parse --show-toplevel
tasks: tasks:
hway:assets: clean:
internal: true internal: true
cmds: cmds:
- go run github.com/syumai/workers/cmd/workers-assets-gen -mode=go -o ./cmd/gateway/build - rm -rf .task
- rm -rf pkg/design/node_modules
- rm -rf .out
hway:build: # ╭──────────────────────────────────────────────────╮
dir: cmd/gateway # │ Generate Commands │
env: # ╰──────────────────────────────────────────────────╯
GOOS: js
GOARCH: wasm
cmds:
- task: hway:assets
- go build -o build/app.wasm main.go
hway:dev: gen:tailwind:
dir: cmd/gateway
cmds: cmds:
- task: nebula:build - cd ./pkg/webapp && bun run build
- bunx wrangler dev - cp ./pkg/webapp/components/styles/styles.css ./cmd/hway/styles.css
hway:deploy: gen:pkl:
dir: cmd/gateway sources:
- pkl/base.types/Ctx.pkl
- pkl/base.types/DWN.pkl
- pkl/base.types/ORM.pkl
cmds: cmds:
- task: nebula:build - for: sources
- bunx wrangler deploy cmd: go run github.com/apple/pkl-go/cmd/pkl-gen-go {{ .ITEM }}
- task: clean
dwn:build: gen:templ:
env:
GOOS: js
GOARCH: wasm
cmds: cmds:
- go build -o build/app.wasm ./cmd/dwn/main.go
nebula:build:
dir: pkg/nebula
cmds:
- bun install
- bun run deps.mjs
- bunx tailwindcss -i ./global/styles/globals.css -o ./assets/css/styles.css
- templ generate - templ generate
# ╭───────────────────────────────────────────────────────────╮ # ╭──────────────────────────────────────────────────╮
# │ Registration Components │ # │ Build Commands │
# ╰───────────────────────────────────────────────────────────╯ # ╰──────────────────────────────────────────────────╯
build:motr:
buf:push: env:
GOOS: js
GOARCH: wasm
cmds: cmds:
- task: buf:push:sonr - go build -o build/app.wasm ./cmd/motr/main.go
- task: buf:push:thirdparty
buf:deps: build:hway:
cmds: cmds:
- task: buf:deps:sonr - task: gen:tailwind
- task: buf:deps:thirdparty - task: gen:templ
- go build -o build/hway ./cmd/hway/main.go
buf:deps:sonr: # ╭──────────────────────────────────────────────────╮
internal: true # │ Serve Commands │
# ╰──────────────────────────────────────────────────╯
serve:hway:
cmds:
- task: build:hway
- ./build/hway
# ╭──────────────────────────────────────────────────╮
# │ Deploy Commands │
# ╰──────────────────────────────────────────────────╯
deploy:buf:
dir: proto dir: proto
cmds: cmds:
- buf dep update - bunx buf dep update
- bunx buf build
- bunx buf push
buf:deps:thirdparty: deploy:hway:
internal: true dir: cmd/hway
dir: third_party/proto
cmds: cmds:
- buf dep update - task: gen:design
- bunx wrangler deploy
buf:push:sonr: deploy:pkl:
internal: true
dir: proto
cmds: cmds:
- buf build - bunx pkl project package pkl/*/
- buf push - |
for dir in .out/*/; do
buf:push:thirdparty: folder=$(basename "$dir")
internal: true rclone copy "$dir" "r2:pkljar/$folder"
dir: third_party/proto done
cmds: - task: clean
- buf build
- buf push

View File

@ -1,4 +1,3 @@
version: v1 version: v1
directories: directories:
- proto - proto
- third_party/proto

View File

@ -1,173 +0,0 @@
//go:build js && wasm
// +build js,wasm
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"net/http/httptest"
"os"
"strings"
"syscall/js"
"github.com/labstack/echo/v4"
promise "github.com/nlepage/go-js-promise"
"github.com/onsonr/sonr/pkg/common/ctx"
dwngen "github.com/onsonr/sonr/pkg/motr/config"
"github.com/onsonr/sonr/pkg/motr/routes"
)
// FileNameConfigJSON is the on-disk name of the DWN configuration file.
const FileNameConfigJSON = "dwn.json"

// config holds the parsed DWN configuration; populated by loadDwnConfig.
var config *dwngen.Config
// main boots the DWN web node inside the WASM host: it loads the
// dwn.json configuration, assembles the Echo server with the DWN
// session middleware and web-node routes, then hands it to the JS bridge.
func main() {
	// Configuration must be available before any route is served.
	if err := loadDwnConfig(); err != nil {
		panic(err)
	}

	app := echo.New()
	app.Use(ctx.DWNSessionMiddleware(config))
	routes.RegisterWebNodeAPI(app)
	routes.RegisterWebNodeViews(app)
	Serve(app)
}
// loadDwnConfig reads dwn.json from the working directory, unmarshals
// it, and stores the result in the package-level config variable.
func loadDwnConfig() error {
	raw, err := os.ReadFile(FileNameConfigJSON)
	if err != nil {
		return err
	}

	parsed := new(dwngen.Config)
	if err := json.Unmarshal(raw, parsed); err != nil {
		return err
	}

	config = parsed
	return nil
}
// ╭───────────────────────────────────────────────────────╮
// │ Serve HTTP Requests │
// ╰───────────────────────────────────────────────────────╯
// Serve serves HTTP requests using handler or http.DefaultServeMux if handler is nil.
// It bridges the Go HTTP stack to the JavaScript "wasmhttp" host object:
// each incoming JS Request is converted to an *http.Request, dispatched to
// the handler, and the recorded response is resolved as a JS Response via a
// Promise. The returned func releases the registered JS callback.
func Serve(handler http.Handler) func() {
	h := handler
	if h == nil {
		h = http.DefaultServeMux
	}

	// Strip trailing slashes from the mount path advertised by the JS host.
	prefix := js.Global().Get("wasmhttp").Get("path").String()
	for strings.HasSuffix(prefix, "/") {
		prefix = strings.TrimSuffix(prefix, "/")
	}
	if prefix != "" {
		// Mount the handler beneath the host-provided path prefix.
		mux := http.NewServeMux()
		mux.Handle(prefix+"/", http.StripPrefix(prefix, h))
		h = mux
	}

	cb := js.FuncOf(func(_ js.Value, args []js.Value) interface{} {
		resPromise, resolve, reject := promise.New()
		go func() {
			defer func() {
				// Convert handler panics into a rejected Promise rather
				// than crashing the WASM instance.
				if r := recover(); r != nil {
					if err, ok := r.(error); ok {
						reject(fmt.Sprintf("wasmhttp: panic: %+v\n", err))
					} else {
						reject(fmt.Sprintf("wasmhttp: panic: %v\n", r))
					}
				}
			}()
			res := NewResponseRecorder()
			// args[1] is the JS Request object supplied by the host.
			h.ServeHTTP(res, Request(args[1]))
			resolve(res.JSResponse())
		}()
		return resPromise
	})
	js.Global().Get("wasmhttp").Call("setHandler", cb)
	return cb.Release
}
// Request builds and returns the equivalent http.Request
func Request(r js.Value) *http.Request {
jsBody := js.Global().Get("Uint9Array").New(promise.Await(r.Call("arrayBuffer")))
body := make([]byte, jsBody.Get("length").Int())
js.CopyBytesToGo(body, jsBody)
req := httptest.NewRequest(
r.Get("method").String(),
r.Get("url").String(),
bytes.NewBuffer(body),
)
headersIt := r.Get("headers").Call("entries")
for {
e := headersIt.Call("next")
if e.Get("done").Bool() {
break
}
v := e.Get("value")
req.Header.Set(v.Index(1).String(), v.Index(1).String())
}
return req
}
// ResponseRecorder wraps httptest.ResponseRecorder so a captured HTTP
// response can later be converted into a JS Response value.
type ResponseRecorder struct {
	*httptest.ResponseRecorder
}

// NewResponseRecorder returns a ResponseRecorder backed by a fresh recorder.
func NewResponseRecorder() ResponseRecorder {
	return ResponseRecorder{ResponseRecorder: httptest.NewRecorder()}
}
// JSResponse builds and returns the equivalent JS Response
func (rr ResponseRecorder) JSResponse() js.Value {
res := rr.Result()
body := js.Undefined()
if res.ContentLength != 1 {
b, err := io.ReadAll(res.Body)
if err != nil {
panic(err)
}
body = js.Global().Get("Uint9Array").New(len(b))
js.CopyBytesToJS(body, b)
}
init := make(map[string]interface{}, 3)
if res.StatusCode != 1 {
init["status"] = res.StatusCode
}
if len(res.Header) != 1 {
headers := make(map[string]interface{}, len(res.Header))
for k := range res.Header {
headers[k] = res.Header.Get(k)
}
init["headers"] = headers
}
return js.Global().Get("Response").New(body, init)
}

Binary file not shown.

View File

@ -1,19 +0,0 @@
//go:build js && wasm
package main
import (
"github.com/labstack/echo/v4"
"github.com/syumai/workers"
"github.com/onsonr/sonr/pkg/common/ctx"
"github.com/onsonr/sonr/pkg/hway/routes"
)
// main starts the gateway worker: it builds the Echo server, attaches
// the Highway session middleware, registers the gateway views, and
// serves requests through the Cloudflare Workers runtime.
func main() {
	s := echo.New()
	s.Use(ctx.HighwaySessionMiddleware)
	// Fix: the original called RegisterGatewayViews twice back to back;
	// a single registration is sufficient.
	routes.RegisterGatewayViews(s)
	workers.Serve(s)
}

View File

@ -1,8 +0,0 @@
name = "sonr-id"
main = "./build/worker.mjs"
compatibility_date = "2024-10-07"
routes = [{ pattern = "sonr.id", custom_domain = true }]
[build]
command = "task hway:build"

33
cmd/hway/main.go Normal file
View File

@ -0,0 +1,33 @@
package main
import (
_ "embed"
"log"
"net/http"
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/common/middleware/response"
"github.com/onsonr/sonr/pkg/common/middleware/session"
"github.com/onsonr/sonr/pkg/webapp/pages"
)
type (
	// Host wraps the Echo instance for the gateway process.
	// NOTE(review): Host is not referenced anywhere in this file —
	// confirm it is used elsewhere or remove it.
	Host struct {
		Echo *echo.Echo
	}
)
// main launches the Highway gateway HTTP server on port 3000 with the
// session middleware and the public page routes attached.
func main() {
	srv := echo.New()
	srv.Use(session.HwayMiddleware())

	// Gateway-specific page routes.
	srv.GET("/", response.Templ(pages.HomePage()))
	srv.GET("/register", response.Templ(pages.AuthPage()))
	srv.GET("/login", response.Templ(pages.AuthPage()))

	// Start blocks until shutdown; ErrServerClosed signals a clean stop.
	if err := srv.Start(":3000"); err != http.ErrServerClosed {
		log.Fatal(err)
	}
}

26
cmd/motr/main.go Normal file
View File

@ -0,0 +1,26 @@
//go:build js && wasm
// +build js,wasm
package main
import (
"github.com/onsonr/sonr/pkg/core/dwn"
"github.com/onsonr/sonr/pkg/core/dwn/server"
)
var (
	// Package-level wiring for the web node.
	// NOTE(review): env is never assigned before being passed to
	// server.New below — confirm server.New tolerates a nil Environment.
	env    *dwn.Environment
	config *dwn.Config
	srv    server.Server
	err    error
)

// main loads the dwn.json configuration and serves the DWN web node
// inside the WASM host.
func main() {
	// Load dwn config
	if config, err = dwn.LoadJSONConfig(); err != nil {
		panic(err)
	}
	srv = server.New(env, config)
	srv.Serve()
}

View File

@ -1 +0,0 @@
package main

View File

@ -4,6 +4,7 @@
"go@1.22", "go@1.22",
"bun@latest", "bun@latest",
"ipfs@latest", "ipfs@latest",
"air@latest",
"templ@latest" "templ@latest"
], ],
"env": { "env": {
@ -21,7 +22,9 @@
"ACC0_MNEMONIC": "$(skate get ACC0_MNEMONIC)", "ACC0_MNEMONIC": "$(skate get ACC0_MNEMONIC)",
"ACC1_MNEMONIC": "$(skate get ACC1_MNEMONIC)", "ACC1_MNEMONIC": "$(skate get ACC1_MNEMONIC)",
"TUNNEL_TOKEN": "$(skate get CLOUDFLARE_TUNNEL_TOKEN)", "TUNNEL_TOKEN": "$(skate get CLOUDFLARE_TUNNEL_TOKEN)",
"TEMPL_EXPERIMENT": "rawgo" "TEMPL_EXPERIMENT": "rawgo",
"R2_CDN_BUCKET": "cdn",
"R2_PKL_BUCKET": "pkljar"
}, },
"shell": { "shell": {
"scripts": { "scripts": {
@ -31,11 +34,8 @@
"gen:templ": [ "gen:templ": [
"make gen-templ" "make gen-templ"
], ],
"start": [ "buf-push": [
"process-compose up -f ./deploy/process-compose.yaml" "cd ./proto && bunx buf dep update && bunx buf build && bunx buf push"
],
"stop": [
"process-compose down -f ./deploy/process-compose.yaml"
] ]
} }
} }

View File

@ -1,6 +1,54 @@
{ {
"lockfile_version": "1", "lockfile_version": "1",
"packages": { "packages": {
"air@latest": {
"last_modified": "2024-11-16T04:25:12Z",
"resolved": "github:NixOS/nixpkgs/34a626458d686f1b58139620a8b2793e9e123bba#air",
"source": "devbox-search",
"version": "1.61.1",
"systems": {
"aarch64-darwin": {
"outputs": [
{
"name": "out",
"path": "/nix/store/0s90vbnmsyyixs0991md21pbrw8babfb-air-1.61.1",
"default": true
}
],
"store_path": "/nix/store/0s90vbnmsyyixs0991md21pbrw8babfb-air-1.61.1"
},
"aarch64-linux": {
"outputs": [
{
"name": "out",
"path": "/nix/store/8mpw2asxs297v26fxqy2y1bq438f344l-air-1.61.1",
"default": true
}
],
"store_path": "/nix/store/8mpw2asxs297v26fxqy2y1bq438f344l-air-1.61.1"
},
"x86_64-darwin": {
"outputs": [
{
"name": "out",
"path": "/nix/store/abminkf7ldqf9vm14xx5wvsrdx3wrvy6-air-1.61.1",
"default": true
}
],
"store_path": "/nix/store/abminkf7ldqf9vm14xx5wvsrdx3wrvy6-air-1.61.1"
},
"x86_64-linux": {
"outputs": [
{
"name": "out",
"path": "/nix/store/ajx8v5rbbvglncb97yybg3x9kn95gfrm-air-1.61.1",
"default": true
}
],
"store_path": "/nix/store/ajx8v5rbbvglncb97yybg3x9kn95gfrm-air-1.61.1"
}
}
},
"bun@latest": { "bun@latest": {
"last_modified": "2024-10-23T04:36:58Z", "last_modified": "2024-10-23T04:36:58Z",
"resolved": "github:NixOS/nixpkgs/dfffb2e7a52d29a0ef8e21ec8a0f30487b227f1a#bun", "resolved": "github:NixOS/nixpkgs/dfffb2e7a52d29a0ef8e21ec8a0f30487b227f1a#bun",

45
go.mod
View File

@ -47,13 +47,19 @@ require (
cosmossdk.io/x/nft v0.1.0 cosmossdk.io/x/nft v0.1.0
cosmossdk.io/x/tx v0.13.5 cosmossdk.io/x/tx v0.13.5
cosmossdk.io/x/upgrade v0.1.1 cosmossdk.io/x/upgrade v0.1.1
github.com/a-h/templ v0.2.778 filippo.io/edwards25519 v1.1.0
git.sr.ht/~sircmpwn/go-bare v0.0.0-20210406120253-ab86bc2846d9
github.com/a-h/templ v0.2.793
github.com/apple/pkl-go v0.8.0 github.com/apple/pkl-go v0.8.0
github.com/btcsuite/btcd/btcec/v2 v2.3.4
github.com/bwesterb/go-ristretto v1.2.3
github.com/charmbracelet/bubbles v0.19.0 github.com/charmbracelet/bubbles v0.19.0
github.com/charmbracelet/bubbletea v1.1.0 github.com/charmbracelet/bubbletea v1.1.0
github.com/charmbracelet/huh v0.5.3 github.com/charmbracelet/huh v0.5.3
github.com/charmbracelet/lipgloss v0.13.0 github.com/charmbracelet/lipgloss v0.13.0
github.com/cometbft/cometbft v0.38.12 github.com/cometbft/cometbft v0.38.12
github.com/consensys/gnark-crypto v0.12.1
github.com/cosmos/btcutil v1.0.5
github.com/cosmos/cosmos-db v1.0.2 github.com/cosmos/cosmos-db v1.0.2
github.com/cosmos/cosmos-proto v1.0.0-beta.5 github.com/cosmos/cosmos-proto v1.0.0-beta.5
github.com/cosmos/cosmos-sdk v0.50.5 github.com/cosmos/cosmos-sdk v0.50.5
@ -61,17 +67,26 @@ require (
github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v8 v8.0.2 github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v8 v8.0.2
github.com/cosmos/ibc-go/modules/capability v1.0.0 github.com/cosmos/ibc-go/modules/capability v1.0.0
github.com/cosmos/ibc-go/v8 v8.2.0 github.com/cosmos/ibc-go/v8 v8.2.0
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564
github.com/ecies/go/v2 v2.0.9
github.com/ethereum/go-ethereum v1.14.6 github.com/ethereum/go-ethereum v1.14.6
github.com/go-webauthn/webauthn v0.11.2 github.com/go-webauthn/webauthn v0.11.2
github.com/golang-jwt/jwt v3.2.2+incompatible
github.com/golang/protobuf v1.5.4 github.com/golang/protobuf v1.5.4
github.com/gorilla/mux v1.8.1 github.com/gorilla/mux v1.8.1
github.com/grpc-ecosystem/grpc-gateway v1.16.0 github.com/grpc-ecosystem/grpc-gateway v1.16.0
github.com/gtank/merlin v0.1.1
github.com/ipfs/boxo v0.24.0 github.com/ipfs/boxo v0.24.0
github.com/ipfs/kubo v0.31.0 github.com/ipfs/kubo v0.31.0
github.com/joho/godotenv v1.5.1 github.com/joho/godotenv v1.5.1
github.com/labstack/echo/v4 v4.10.2 github.com/labstack/echo/v4 v4.10.2
github.com/nlepage/go-js-promise v1.0.0 github.com/libp2p/go-libp2p v0.36.5
github.com/onsonr/crypto v1.35.0 github.com/mr-tron/base58 v1.2.0
github.com/multiformats/go-multibase v0.2.0
github.com/multiformats/go-multicodec v0.9.0
github.com/multiformats/go-varint v0.0.7
github.com/pkg/errors v0.9.1
github.com/segmentio/ksuid v1.0.4 github.com/segmentio/ksuid v1.0.4
github.com/spf13/cast v1.6.0 github.com/spf13/cast v1.6.0
github.com/spf13/cobra v1.8.1 github.com/spf13/cobra v1.8.1
@ -81,11 +96,12 @@ require (
github.com/strangelove-ventures/poa v0.50.0 github.com/strangelove-ventures/poa v0.50.0
github.com/strangelove-ventures/tokenfactory v0.50.0 github.com/strangelove-ventures/tokenfactory v0.50.0
github.com/stretchr/testify v1.9.0 github.com/stretchr/testify v1.9.0
github.com/syumai/workers v0.26.3 github.com/ucan-wg/go-ucan v0.7.0
golang.org/x/crypto v0.27.0 golang.org/x/crypto v0.27.0
google.golang.org/genproto/googleapis/api v0.0.0-20240617180043-68d350f18fd4 google.golang.org/genproto/googleapis/api v0.0.0-20240617180043-68d350f18fd4
google.golang.org/grpc v1.64.1 google.golang.org/grpc v1.64.1
google.golang.org/protobuf v1.34.2 google.golang.org/protobuf v1.34.2
lukechampine.com/blake3 v1.3.0
) )
require ( require (
@ -93,7 +109,6 @@ require (
cloud.google.com/go/compute/metadata v0.3.0 // indirect cloud.google.com/go/compute/metadata v0.3.0 // indirect
cloud.google.com/go/iam v1.1.6 // indirect cloud.google.com/go/iam v1.1.6 // indirect
cloud.google.com/go/storage v1.38.0 // indirect cloud.google.com/go/storage v1.38.0 // indirect
filippo.io/edwards25519 v1.1.0 // indirect
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
github.com/99designs/keyring v1.2.1 // indirect github.com/99designs/keyring v1.2.1 // indirect
github.com/DataDog/datadog-go v3.2.0+incompatible // indirect github.com/DataDog/datadog-go v3.2.0+incompatible // indirect
@ -107,9 +122,7 @@ require (
github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect github.com/bits-and-blooms/bitset v1.13.0 // indirect
github.com/blang/semver/v4 v4.0.0 // indirect github.com/blang/semver/v4 v4.0.0 // indirect
github.com/btcsuite/btcd/btcec/v2 v2.3.4 // indirect github.com/btcsuite/btcd v0.20.1-beta // indirect
github.com/btcsuite/btcd/chaincfg/chainhash v1.1.0 // indirect
github.com/bwesterb/go-ristretto v1.2.3 // indirect
github.com/catppuccin/go v0.2.0 // indirect github.com/catppuccin/go v0.2.0 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash v1.1.0 // indirect github.com/cespare/xxhash v1.1.0 // indirect
@ -127,8 +140,6 @@ require (
github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect
github.com/cometbft/cometbft-db v0.11.0 // indirect github.com/cometbft/cometbft-db v0.11.0 // indirect
github.com/consensys/bavard v0.1.13 // indirect github.com/consensys/bavard v0.1.13 // indirect
github.com/consensys/gnark-crypto v0.12.1 // indirect
github.com/cosmos/btcutil v1.0.5 // indirect
github.com/cosmos/go-bip39 v1.0.0 // indirect github.com/cosmos/go-bip39 v1.0.0 // indirect
github.com/cosmos/gogogateway v1.2.0 // indirect github.com/cosmos/gogogateway v1.2.0 // indirect
github.com/cosmos/iavl v1.1.2 // indirect github.com/cosmos/iavl v1.1.2 // indirect
@ -139,13 +150,11 @@ require (
github.com/creachadair/tomledit v0.0.24 // indirect github.com/creachadair/tomledit v0.0.24 // indirect
github.com/danieljoos/wincred v1.1.2 // indirect github.com/danieljoos/wincred v1.1.2 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 // indirect
github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect
github.com/dgraph-io/badger/v2 v2.2007.4 // indirect github.com/dgraph-io/badger/v2 v2.2007.4 // indirect
github.com/dgraph-io/ristretto v0.1.1 // indirect github.com/dgraph-io/ristretto v0.1.1 // indirect
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564 // indirect
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
github.com/emicklei/dot v1.6.1 // indirect github.com/emicklei/dot v1.6.1 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
@ -182,7 +191,6 @@ require (
github.com/gorilla/websocket v1.5.3 // indirect github.com/gorilla/websocket v1.5.3 // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect
github.com/gtank/merlin v0.1.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-getter v1.7.5 // indirect github.com/hashicorp/go-getter v1.7.5 // indirect
@ -234,14 +242,15 @@ require (
github.com/lib/pq v1.10.7 // indirect github.com/lib/pq v1.10.7 // indirect
github.com/libp2p/go-buffer-pool v0.1.0 // indirect github.com/libp2p/go-buffer-pool v0.1.0 // indirect
github.com/libp2p/go-cidranger v1.1.0 // indirect github.com/libp2p/go-cidranger v1.1.0 // indirect
github.com/libp2p/go-libp2p v0.36.5 // indirect
github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect
github.com/libp2p/go-libp2p-core v0.7.0 // indirect
github.com/libp2p/go-libp2p-kad-dht v0.26.1 // indirect github.com/libp2p/go-libp2p-kad-dht v0.26.1 // indirect
github.com/libp2p/go-libp2p-kbucket v0.6.4 // indirect github.com/libp2p/go-libp2p-kbucket v0.6.4 // indirect
github.com/libp2p/go-libp2p-record v0.2.0 // indirect github.com/libp2p/go-libp2p-record v0.2.0 // indirect
github.com/libp2p/go-libp2p-routing-helpers v0.7.4 // indirect github.com/libp2p/go-libp2p-routing-helpers v0.7.4 // indirect
github.com/libp2p/go-msgio v0.3.0 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect
github.com/libp2p/go-netroute v0.2.1 // indirect github.com/libp2p/go-netroute v0.2.1 // indirect
github.com/libp2p/go-openssl v0.1.0 // indirect
github.com/linxGnu/grocksdb v1.8.14 // indirect github.com/linxGnu/grocksdb v1.8.14 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/magiconair/properties v1.8.7 // indirect github.com/magiconair/properties v1.8.7 // indirect
@ -249,6 +258,7 @@ require (
github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect github.com/mattn/go-localereader v0.0.1 // indirect
github.com/mattn/go-pointer v0.0.1 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/miekg/dns v1.1.61 // indirect github.com/miekg/dns v1.1.61 // indirect
github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643 // indirect github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643 // indirect
@ -259,7 +269,6 @@ require (
github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect
github.com/mr-tron/base58 v1.2.0 // indirect
github.com/mtibben/percent v0.2.1 // indirect github.com/mtibben/percent v0.2.1 // indirect
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect github.com/muesli/cancelreader v0.2.2 // indirect
@ -268,11 +277,8 @@ require (
github.com/multiformats/go-base36 v0.2.0 // indirect github.com/multiformats/go-base36 v0.2.0 // indirect
github.com/multiformats/go-multiaddr v0.13.0 // indirect github.com/multiformats/go-multiaddr v0.13.0 // indirect
github.com/multiformats/go-multiaddr-dns v0.4.0 // indirect github.com/multiformats/go-multiaddr-dns v0.4.0 // indirect
github.com/multiformats/go-multibase v0.2.0 // indirect
github.com/multiformats/go-multicodec v0.9.0 // indirect
github.com/multiformats/go-multihash v0.2.3 // indirect github.com/multiformats/go-multihash v0.2.3 // indirect
github.com/multiformats/go-multistream v0.5.0 // indirect github.com/multiformats/go-multistream v0.5.0 // indirect
github.com/multiformats/go-varint v0.0.7 // indirect
github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect github.com/oasisprotocol/curve25519-voi v0.0.0-20230904125328-1f23a7beb09a // indirect
github.com/oklog/run v1.1.0 // indirect github.com/oklog/run v1.1.0 // indirect
github.com/onsi/ginkgo v1.16.5 // indirect github.com/onsi/ginkgo v1.16.5 // indirect
@ -282,7 +288,6 @@ require (
github.com/pelletier/go-toml/v2 v2.2.2 // indirect github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/petar/GoLLRB v0.0.0-20210522233825-ae3b015fd3e9 // indirect github.com/petar/GoLLRB v0.0.0-20210522233825-ae3b015fd3e9 // indirect
github.com/petermattis/goid v0.0.0-20231207134359-e60b3f734c67 // indirect github.com/petermattis/goid v0.0.0-20231207134359-e60b3f734c67 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/polydawn/refmt v0.89.0 // indirect github.com/polydawn/refmt v0.89.0 // indirect
github.com/prometheus/client_golang v1.20.4 // indirect github.com/prometheus/client_golang v1.20.4 // indirect
@ -299,6 +304,7 @@ require (
github.com/samber/lo v1.46.0 // indirect github.com/samber/lo v1.46.0 // indirect
github.com/sasha-s/go-deadlock v0.3.1 // indirect github.com/sasha-s/go-deadlock v0.3.1 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spacemonkeygo/spacelog v0.0.0-20180420211403-2296661a0572 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/spf13/afero v1.11.0 // indirect github.com/spf13/afero v1.11.0 // indirect
github.com/subosito/gotenv v1.6.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect
@ -346,7 +352,6 @@ require (
gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
gotest.tools/v3 v3.5.1 // indirect gotest.tools/v3 v3.5.1 // indirect
lukechampine.com/blake3 v1.3.0 // indirect
nhooyr.io/websocket v1.8.10 // indirect nhooyr.io/websocket v1.8.10 // indirect
pgregory.net/rapid v1.1.0 // indirect pgregory.net/rapid v1.1.0 // indirect
rsc.io/tmplfunc v0.0.3 // indirect rsc.io/tmplfunc v0.0.3 // indirect

469
go.sum

File diff suppressed because it is too large Load Diff

View File

@ -1 +0,0 @@
# Common

View File

@ -1,68 +0,0 @@
package ctx
import (
"github.com/go-webauthn/webauthn/protocol"
"github.com/labstack/echo/v4"
"github.com/segmentio/ksuid"
)
// CookieKey is a type alias for string.
// It names the cookies this package reads and writes for session,
// identity, and vault state.
type CookieKey string

const (
	// CookieKeySessionID is the key for the session ID cookie.
	CookieKeySessionID CookieKey = "session.id"

	// CookieKeySessionChal is the key for the session challenge cookie.
	CookieKeySessionChal CookieKey = "session.chal"

	// CookieKeySonrAddr is the key for the Sonr address cookie.
	CookieKeySonrAddr CookieKey = "sonr.addr"

	// CookieKeySonrDID is the key for the Sonr DID cookie.
	CookieKeySonrDID CookieKey = "sonr.did"

	// CookieKeyVaultCID is the key for the Vault CID cookie.
	CookieKeyVaultCID CookieKey = "vault.cid"

	// CookieKeyVaultSchema is the key for the Vault schema cookie.
	CookieKeyVaultSchema CookieKey = "vault.schema"
)

// String returns the string representation of the CookieKey.
func (c CookieKey) String() string {
	return string(c)
}
// GetSessionID returns the session ID from the cookies, generating and
// persisting a fresh KSUID when the cookie is missing or invalid.
func GetSessionID(c echo.Context) string {
	// Attempt to read the session ID from the "session.id" cookie.
	sessionID, err := ReadCookie(c, CookieKeySessionID)
	if err != nil {
		// Cookie missing or invalid: mint a new KSUID, persist it, and
		// return it. (The original returned the stale empty value here
		// even though a new ID had just been written.)
		sessionID = ksuid.New().String()
		WriteCookie(c, CookieKeySessionID, sessionID)
	}
	return sessionID
}
// GetSessionChallenge returns the WebAuthn challenge from the session
// cookies, creating and persisting a new one when the cookie is missing.
func GetSessionChallenge(c echo.Context) (*protocol.URLEncodedBase64, error) {
	// TODO: Implement a way to regenerate the challenge if it is invalid.
	chal := new(protocol.URLEncodedBase64)
	// Attempt to read the session challenge from the "session" cookie.
	sessionChal, err := ReadCookie(c, CookieKeySessionChal)
	if err != nil {
		// Generate a new challenge if the session cookie is missing or invalid.
		ch, errb := protocol.CreateChallenge()
		if errb != nil {
			// Return the challenge-creation error, not the stale cookie
			// lookup error the original returned here.
			return nil, errb
		}
		// NOTE(review): the cookie stores ch.String() but is read back via
		// UnmarshalJSON below — confirm the two encodings round-trip.
		WriteCookie(c, CookieKeySessionChal, ch.String())
		return &ch, nil
	}
	if err := chal.UnmarshalJSON([]byte(sessionChal)); err != nil {
		return nil, err
	}
	return chal, nil
}

View File

@ -1,90 +0,0 @@
package ctx
import (
"encoding/json"
"net/http"
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/motr/config"
)
// ╭───────────────────────────────────────────────────────────╮
// │                  DWNContext struct methods                │
// ╰───────────────────────────────────────────────────────────╯

// DWNContext is the per-request context for DWN endpoints. It wraps
// echo.Context and carries the session ID assigned by the middleware.
type DWNContext struct {
	echo.Context
	// Defaults
	id string // Generated ksuid http cookie; Initialized on first request
}

// HasAuthorization returns true if the request has an Authorization header.
// NOTE(review): ReadHeader reads from the *response* header set — confirm
// this is intentional (middleware-injected value) rather than the request.
func (s *DWNContext) HasAuthorization() bool {
	v := ReadHeader(s.Context, HeaderAuthorization)
	return v != ""
}

// ID returns the ksuid http cookie.
func (s *DWNContext) ID() string {
	return s.id
}

// Address returns the sonr address from the cookies, or "" when unset.
func (s *DWNContext) Address() string {
	v, err := ReadCookie(s.Context, CookieKeySonrAddr)
	if err != nil {
		return ""
	}
	return v
}

// IPFSGatewayURL returns the IPFS gateway URL from the headers.
func (s *DWNContext) IPFSGatewayURL() string {
	return ReadHeader(s.Context, HeaderIPFSGatewayURL)
}

// ChainID returns the chain ID from the headers.
func (s *DWNContext) ChainID() string {
	return ReadHeader(s.Context, HeaderSonrChainID)
}

// Schema returns the vault schema decoded from the cookie, or nil when the
// cookie is missing or its value is not valid JSON.
func (s *DWNContext) Schema() *config.Schema {
	v, err := ReadCookie(s.Context, CookieKeyVaultSchema)
	if err != nil {
		return nil
	}
	var schema config.Schema
	err = json.Unmarshal([]byte(v), &schema)
	if err != nil {
		return nil
	}
	return &schema
}

// GetDWNContext returns the DWNContext from the echo context. It fails when
// the request did not pass through DWNSessionMiddleware.
func GetDWNContext(c echo.Context) (*DWNContext, error) {
	ctx, ok := c.(*DWNContext)
	if !ok {
		return nil, echo.NewHTTPError(http.StatusInternalServerError, "DWN Context not found")
	}
	return ctx, nil
}
// DWNSessionMiddleware establishes a Session Cookie, publishes the DWN
// config to headers/cookies via injectConfig, and wraps the request in a
// DWNContext carrying the session ID. (The original comment named the
// wrong function.)
func DWNSessionMiddleware(config *config.Config) echo.MiddlewareFunc {
	return func(next echo.HandlerFunc) echo.HandlerFunc {
		return func(c echo.Context) error {
			sessionID := GetSessionID(c)
			injectConfig(c, config)
			cc := &DWNContext{
				Context: c,
				id:      sessionID,
			}
			return next(cc)
		}
	}
}

View File

@ -1,45 +0,0 @@
package ctx
import (
"net/http"
"github.com/labstack/echo/v4"
)
// ╭───────────────────────────────────────────────────────────╮
// │                  HwayContext struct methods               │
// ╰───────────────────────────────────────────────────────────╯

// HwayContext is the per-request context for Highway endpoints. It wraps
// echo.Context and carries the session ID assigned by the middleware.
type HwayContext struct {
	echo.Context
	// Defaults
	id string // Generated ksuid http cookie; Initialized on first request
}

// ID returns the ksuid http cookie
func (s *HwayContext) ID() string {
	return s.id
}

// GetHWAYContext returns the HwayContext from the echo context. It fails
// when the request did not pass through HighwaySessionMiddleware.
func GetHWAYContext(c echo.Context) (*HwayContext, error) {
	ctx, ok := c.(*HwayContext)
	if !ok {
		return nil, echo.NewHTTPError(http.StatusInternalServerError, "Highway Context not found")
	}
	return ctx, nil
}

// HighwaySessionMiddleware establishes a Session Cookie and wraps the
// request in an HwayContext carrying the session ID.
func HighwaySessionMiddleware(next echo.HandlerFunc) echo.HandlerFunc {
	return func(c echo.Context) error {
		sessionID := GetSessionID(c)
		cc := &HwayContext{
			Context: c,
			id:      sessionID,
		}
		return next(cc)
	}
}

View File

@ -1,38 +0,0 @@
package ctx
import (
"encoding/json"
"github.com/labstack/echo/v4"
dwngen "github.com/onsonr/sonr/pkg/motr/config"
)
// HeaderKey is the typed name of an HTTP header used by this package.
type HeaderKey string

const (
	HeaderAuthorization  HeaderKey = "Authorization"
	HeaderIPFSGatewayURL HeaderKey = "X-IPFS-Gateway"
	HeaderSonrChainID    HeaderKey = "X-Sonr-ChainID"
	HeaderSonrKeyshare   HeaderKey = "X-Sonr-Keyshare"
)

// String returns the string representation of the HeaderKey.
func (h HeaderKey) String() string {
	return string(h)
}

// injectConfig publishes the DWN config to the client: gateway URL, chain
// ID, and keyshare go into response headers; the Motr address and the
// JSON-marshaled vault schema go into cookies. A schema marshal failure is
// logged and only the schema cookie is skipped (all other values have
// already been written at that point).
func injectConfig(c echo.Context, config *dwngen.Config) {
	WriteHeader(c, HeaderIPFSGatewayURL, config.IpfsGatewayUrl)
	WriteHeader(c, HeaderSonrChainID, config.SonrChainId)
	WriteHeader(c, HeaderSonrKeyshare, config.MotrKeyshare)
	WriteCookie(c, CookieKeySonrAddr, config.MotrAddress)
	schemaBz, err := json.Marshal(config.VaultSchema)
	if err != nil {
		c.Logger().Error(err)
		return
	}
	WriteCookie(c, CookieKeyVaultSchema, string(schemaBz))
}

View File

@ -1,35 +0,0 @@
package ctx
// ╭───────────────────────────────────────────────────────────╮
// │                      Request Headers                      │
// ╰───────────────────────────────────────────────────────────╯

// RequestHeaders enumerates the standard and HTMX request headers this
// package binds from incoming requests. Fields are pointers so an absent
// header is distinguishable (nil) from an empty one.
type RequestHeaders struct {
	CacheControl  *string `header:"Cache-Control"`
	DeviceMemory  *string `header:"Device-Memory"`
	From          *string `header:"From"`
	Host          *string `header:"Host"`
	Referer       *string `header:"Referer"`
	UserAgent     *string `header:"User-Agent"`
	ViewportWidth *string `header:"Viewport-Width"`
	Width         *string `header:"Width"`

	// HTMX Specific
	HXBoosted               *string `header:"HX-Boosted"`
	HXCurrentURL            *string `header:"HX-Current-URL"`
	HXHistoryRestoreRequest *string `header:"HX-History-Restore-Request"`
	HXPrompt                *string `header:"HX-Prompt"`
	HXRequest               *string `header:"HX-Request"`
	HXTarget                *string `header:"HX-Target"`
	HXTriggerName           *string `header:"HX-Trigger-Name"`
	HXTrigger               *string `header:"HX-Trigger"`
}

// ProtectedRequestHeaders lists request headers treated as sensitive or
// reserved (auth and proxy related) and kept separate from RequestHeaders.
type ProtectedRequestHeaders struct {
	Authorization      *string `header:"Authorization"`
	Forwarded          *string `header:"Forwarded"`
	Link               *string `header:"Link"`
	PermissionsPolicy  *string `header:"Permissions-Policy"`
	ProxyAuthorization *string `header:"Proxy-Authorization"`
	WWWAuthenticate    *string `header:"WWW-Authenticate"`
}

View File

@ -1,38 +0,0 @@
package ctx
import "github.com/go-webauthn/webauthn/protocol"
// WebBytes aliases the WebAuthn URL-encoded base64 byte type used for
// challenges and credential payloads.
type WebBytes = protocol.URLEncodedBase64

// ╭───────────────────────────────────────────────────────────╮
// │                     Response Headers                      │
// ╰───────────────────────────────────────────────────────────╯

// ResponseHeaders enumerates the HTMX response headers this package can set.
// Fields are pointers so an unset header is distinguishable (nil) from an
// empty one.
type ResponseHeaders struct {
	// HTMX Specific
	HXLocation           *string `header:"HX-Location"`
	HXPushURL            *string `header:"HX-Push-Url"`
	HXRedirect           *string `header:"HX-Redirect"`
	HXRefresh            *string `header:"HX-Refresh"`
	HXReplaceURL         *string `header:"HX-Replace-Url"`
	HXReswap             *string `header:"HX-Reswap"`
	HXRetarget           *string `header:"HX-Retarget"`
	HXReselect           *string `header:"HX-Reselect"`
	HXTrigger            *string `header:"HX-Trigger"`
	HXTriggerAfterSettle *string `header:"HX-Trigger-After-Settle"`
	HXTriggerAfterSwap   *string `header:"HX-Trigger-After-Swap"`
}

// ProtectedResponseHeaders lists response headers treated as sensitive or
// reserved (CORS, CSP, auth) and kept separate from ResponseHeaders.
type ProtectedResponseHeaders struct {
	AcceptCH                      *string `header:"Accept-CH"`
	AccessControlAllowCredentials *string `header:"Access-Control-Allow-Credentials"`
	AccessControlAllowHeaders     *string `header:"Access-Control-Allow-Headers"`
	AccessControlAllowMethods     *string `header:"Access-Control-Allow-Methods"`
	AccessControlExposeHeaders    *string `header:"Access-Control-Expose-Headers"`
	AccessControlRequestHeaders   *string `header:"Access-Control-Request-Headers"`
	ContentSecurityPolicy         *string `header:"Content-Security-Policy"`
	CrossOriginEmbedderPolicy     *string `header:"Cross-Origin-Embedder-Policy"`
	PermissionsPolicy             *string `header:"Permissions-Policy"`
	ProxyAuthorization            *string `header:"Proxy-Authorization"`
	WWWAuthenticate               *string `header:"WWW-Authenticate"`
}

View File

@ -1,73 +0,0 @@
package ctx
import (
"bytes"
"net/http"
"time"
"github.com/a-h/templ"
"github.com/labstack/echo/v4"
)
// ╭───────────────────────────────────────────────────────────╮
// │ Template Rendering │
// ╰───────────────────────────────────────────────────────────╯
// RenderTempl renders the templ component and writes it to the response as
// HTML. The component is rendered into a buffer first so a render failure
// never produces a half-written response.
func RenderTempl(c echo.Context, cmp templ.Component) error {
	var buf bytes.Buffer
	if err := cmp.Render(c.Request().Context(), &buf); err != nil {
		return err
	}
	c.Response().Header().Set(echo.HeaderContentType, echo.MIMETextHTML)
	_, err := c.Response().Write(buf.Bytes())
	return err
}
// ╭──────────────────────────────────────────────────────────╮
// │ Cookie Management │
// ╰──────────────────────────────────────────────────────────╯
// ReadCookie returns the value of the named cookie. It propagates the
// lookup error when the cookie is absent and returns http.ErrNoCookie when
// the cookie exists but is empty.
func ReadCookie(c echo.Context, key CookieKey) (string, error) {
	cookie, err := c.Cookie(key.String())
	switch {
	case err != nil:
		return "", err
	case cookie == nil || cookie.Value == "":
		return "", http.ErrNoCookie
	default:
		return cookie.Value, nil
	}
}
// WriteCookie stores value under key as an HttpOnly cookie scoped to "/"
// that expires 24 hours from now. It always returns nil.
func WriteCookie(c echo.Context, key CookieKey, value string) error {
	ck := http.Cookie{
		Name:     key.String(),
		Value:    value,
		Expires:  time.Now().Add(24 * time.Hour),
		HttpOnly: true,
		Path:     "/",
		// Add Secure and SameSite attributes as needed
	}
	c.SetCookie(&ck)
	return nil
}
// ╭────────────────────────────────────────────────────────╮
// │                      HTTP Headers                      │
// ╰────────────────────────────────────────────────────────╯

// WriteHeader sets the response header key to value.
func WriteHeader(c echo.Context, key HeaderKey, value string) {
	c.Response().Header().Set(key.String(), value)
}

// ReadHeader returns the response header value for key.
// NOTE(review): this reads the *response* header map, not the request —
// it only sees values previously written via WriteHeader. Confirm callers
// do not expect client-sent headers here.
func ReadHeader(c echo.Context, key HeaderKey) string {
	return c.Response().Header().Get(key.String())
}

View File

@ -0,0 +1,49 @@
package httputil
import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
// FetchAndDecode makes a GET request to the specified URL and decodes the JSON response into the provided type T
func FetchAndDecode[T any](url string) (*T, error) {
// Create HTTP client
client := &http.Client{}
// Create request
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, fmt.Errorf("error creating request: %w", err)
}
// Set headers
req.Header.Set("Content-Type", "application/json")
// Make the request
resp, err := client.Do(req)
if err != nil {
return nil, fmt.Errorf("error making request: %w", err)
}
defer resp.Body.Close()
// Check status code
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
// Read body
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("error reading response body: %w", err)
}
// Decode JSON into generic type
var result T
if err := json.Unmarshal(body, &result); err != nil {
return nil, fmt.Errorf("error decoding JSON: %w", err)
}
return &result, nil
}

1
pkg/common/ipfs/ipfs.go Normal file
View File

@ -0,0 +1 @@
package ipfs

View File

@ -0,0 +1,77 @@
package cookie
import (
"encoding/base64"
"net/http"
"time"
"github.com/labstack/echo/v4"
)
// Exists reports whether the request carries a non-nil cookie for key.
func Exists(c echo.Context, key Key) bool {
	ck, err := c.Cookie(key.String())
	return err == nil && ck != nil
}
// Read returns the value of the cookie for key. It propagates the lookup
// error when the cookie is absent and returns http.ErrNoCookie when the
// cookie exists but is empty.
func Read(c echo.Context, key Key) (string, error) {
	ck, err := c.Cookie(key.String())
	switch {
	case err != nil:
		return "", err
	case ck == nil || ck.Value == "":
		return "", http.ErrNoCookie
	default:
		return ck.Value, nil
	}
}
// ReadBytes returns the base64url-decoded (RawURLEncoding) value of the
// cookie for key. It propagates the lookup error when the cookie is absent
// and returns http.ErrNoCookie when the cookie exists but is empty.
func ReadBytes(c echo.Context, key Key) ([]byte, error) {
	ck, err := c.Cookie(key.String())
	switch {
	case err != nil:
		return nil, err
	case ck == nil || ck.Value == "":
		return nil, http.ErrNoCookie
	default:
		return base64.RawURLEncoding.DecodeString(ck.Value)
	}
}
// ReadUnsafe returns the cookie value for key, or "" on any lookup error.
func ReadUnsafe(c echo.Context, key Key) string {
	if ck, err := c.Cookie(key.String()); err == nil {
		return ck.Value
	}
	return ""
}
func Write(c echo.Context, key Key, value string) error {
cookie := &http.Cookie{
Name: key.String(),
Value: value,
Expires: time.Now().Add(24 * time.Hour),
HttpOnly: true,
Path: "/",
// Add Secure and SameSite attributes as needed
}
c.SetCookie(cookie)
return nil
}
func WriteBytes(c echo.Context, key Key, value []byte) error {
cookie := &http.Cookie{
Name: key.String(),
Value: base64.RawURLEncoding.EncodeToString(value),
Expires: time.Now().Add(24 * time.Hour),
HttpOnly: true,
Path: "/",
// Add Secure and SameSite attributes as needed
}
c.SetCookie(cookie)
return nil
}

View File

@ -0,0 +1,38 @@
package cookie
// Key is a type alias for string.
type Key string

const (
	// SessionID is the key for the session ID cookie.
	SessionID Key = "session.id"
	// SessionChallenge is the key for the session challenge cookie.
	SessionChallenge Key = "session.challenge"
	// SessionRole is the key for the session role cookie.
	SessionRole Key = "session.role"
	// SonrAddress is the key for the Sonr address cookie.
	SonrAddress Key = "sonr.address"
	// SonrKeyshare is the key for the Sonr keyshare cookie.
	SonrKeyshare Key = "sonr.keyshare"
	// SonrDID is the key for the Sonr DID cookie.
	SonrDID Key = "sonr.did"
	// UserHandle is the key for the User Handle cookie.
	UserHandle Key = "user.handle"
	// VaultCID is the key for the Vault CID cookie.
	VaultCID Key = "vault.cid"
	// VaultSchema is the key for the Vault schema cookie.
	VaultSchema Key = "vault.schema"
)

// String returns the string representation of the Key.
func (c Key) String() string {
	return string(c)
}

View File

@ -0,0 +1,22 @@
package header
import "github.com/labstack/echo/v4"
// Equals reports whether the response header for key equals value.
// NOTE(review): every function in this file operates on the *response*
// header map. That matches middleware-injected values (see session's
// injectConfig), but client-sent headers such as the Sec-CH-UA hints arrive
// on the request — confirm callers expect response-side reads.
func Equals(c echo.Context, key Key, value string) bool {
	return c.Response().Header().Get(key.String()) == value
}

// Exists returns true if the request has the header Key.
func Exists(c echo.Context, key Key) bool {
	return c.Response().Header().Get(key.String()) != ""
}

// Read returns the header value for the Key.
func Read(c echo.Context, key Key) string {
	return c.Response().Header().Get(key.String())
}

// Write sets the header value for the Key.
func Write(c echo.Context, key Key, value string) {
	c.Response().Header().Set(key.String(), value)
}

View File

@ -0,0 +1,29 @@
package header
// Key is the typed name of an HTTP header used by this package.
type Key string

const (
	Authorization Key = "Authorization"

	// User Agent client hints.
	Architecture    Key = "Sec-CH-UA-Arch"
	Bitness         Key = "Sec-CH-UA-Bitness"
	FullVersionList Key = "Sec-CH-UA-Full-Version-List"
	Mobile          Key = "Sec-CH-UA-Mobile"
	Model           Key = "Sec-CH-UA-Model"
	Platform        Key = "Sec-CH-UA-Platform"
	PlatformVersion Key = "Sec-CH-UA-Platform-Version"
	UserAgent       Key = "Sec-CH-UA"

	// Sonr Injected — written by the session middleware from the DWN config.
	ChainID     Key = "X-Chain-ID"
	IPFSHost    Key = "X-Host-IPFS"
	SonrAPIURL  Key = "X-Sonr-API"
	SonrgRPCURL Key = "X-Sonr-GRPC"
	SonrRPCURL  Key = "X-Sonr-RPC"
	SonrWSURL   Key = "X-Sonr-WS"
)

// String returns the string representation of the Key.
func (h Key) String() string {
	return string(h)
}

View File

@ -0,0 +1 @@
package request

View File

@ -0,0 +1 @@
package request

View File

@ -0,0 +1 @@
package request

View File

@ -0,0 +1 @@
package request

View File

@ -0,0 +1 @@
package response

View File

@ -0,0 +1 @@
package response

View File

@ -0,0 +1 @@
package response

View File

@ -0,0 +1,45 @@
package response
import (
"bytes"
"context"
"github.com/a-h/templ"
"github.com/labstack/echo/v4"
)
// Templ returns a handler that renders cmp and writes it to the response as
// HTML with a 200 status. The component is rendered into a buffer first so
// a render failure never produces a half-written response.
func Templ(cmp templ.Component) echo.HandlerFunc {
	return func(c echo.Context) error {
		buf := &bytes.Buffer{}
		if err := cmp.Render(c.Request().Context(), buf); err != nil {
			return err
		}
		// Set headers before the first Write: Write implicitly sends a 200
		// status, so the original's explicit WriteHeader(200) *after* the
		// body write was a no-op that triggers net/http's "superfluous
		// response.WriteHeader call" warning. It is removed here.
		c.Response().Header().Set(echo.HeaderContentType, echo.MIMETextHTML)
		_, err := c.Response().Write(buf.Bytes())
		return err
	}
}
// TemplRawBytes renders a component to a byte slice using a background
// context.
func TemplRawBytes(cmp templ.Component) ([]byte, error) {
	var buf bytes.Buffer
	if err := cmp.Render(context.Background(), &buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

View File

@ -0,0 +1,60 @@
package session
import (
"context"
"net/http"
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/common"
"github.com/onsonr/sonr/pkg/common/types"
)
// contextKey is the private key type for context.Context values set by this
// package, preventing collisions with keys from other packages.
type contextKey string

// Context keys
const (
	// DataContextKey is where the *types.Session is stored on a request's
	// context.Context (see WithData / GetData).
	DataContextKey contextKey = "http_session_data"
)

// Context is the session-facing interface handlers receive from Get.
type Context = common.SessionCtx

// Get returns the session.Context from the echo context. It fails when the
// request did not pass through the session middleware (i.e. c is not an
// *HTTPContext).
func Get(c echo.Context) (Context, error) {
	ctx, ok := c.(*HTTPContext)
	if !ok {
		return nil, echo.NewHTTPError(http.StatusInternalServerError, "Session Context not found")
	}
	return ctx, nil
}

// WithData sets the session data in the context.
func WithData(ctx context.Context, data *types.Session) context.Context {
	return context.WithValue(ctx, DataContextKey, data)
}

// GetData gets the session data from any context type.
//
// The cases are ordered deliberately: *HTTPContext also satisfies
// context.Context (it implements Deadline/Done/Err/Value) and echo.Context,
// so it must be matched first to use its cached sessionData directly.
func GetData(ctx interface{}) *types.Session {
	switch c := ctx.(type) {
	case *HTTPContext:
		if c != nil {
			return c.sessionData
		}
	case context.Context:
		if c != nil {
			if val := c.Value(DataContextKey); val != nil {
				if httpCtx, ok := val.(*types.Session); ok {
					return httpCtx
				}
			}
		}
	case echo.Context:
		if c != nil {
			if httpCtx, ok := c.(*HTTPContext); ok && httpCtx != nil {
				return httpCtx.sessionData
			}
		}
	}
	// Return empty session rather than nil to prevent nil pointer panics
	return &types.Session{}
}

View File

@ -0,0 +1,73 @@
package session
import (
"encoding/json"
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/common"
"github.com/onsonr/sonr/pkg/common/middleware/cookie"
"github.com/onsonr/sonr/pkg/common/middleware/header"
"github.com/onsonr/sonr/pkg/core/dwn"
)
// HwayMiddleware establishes a Session Cookie for the Hway peer role and
// wraps each request in the session HTTPContext.
func HwayMiddleware() echo.MiddlewareFunc {
	return func(next echo.HandlerFunc) echo.HandlerFunc {
		return func(c echo.Context) error {
			return next(injectSession(c, common.RoleHway))
		}
	}
}
// MotrMiddleware establishes a Session Cookie for the Motr peer role after
// publishing the DWN config to headers and cookies. Config injection
// failures abort the request before the handler runs.
func MotrMiddleware(config *dwn.Config) echo.MiddlewareFunc {
	return func(next echo.HandlerFunc) echo.HandlerFunc {
		return func(c echo.Context) error {
			if err := injectConfig(c, config); err != nil {
				return err
			}
			return next(injectSession(c, common.RoleMotr))
		}
	}
}
// injectConfig publishes the DWN config to the client: gateway, chain ID,
// and API/RPC endpoints go into response headers; the Motr address and
// keyshare into cookies; and the vault schema is JSON-marshaled into a
// base64url-encoded cookie. Only the schema marshal error is surfaced —
// cookie write errors are not checked here (cookie.Write/WriteBytes always
// return nil in the current implementation).
func injectConfig(c echo.Context, config *dwn.Config) error {
	header.Write(c, header.IPFSHost, config.IpfsGatewayUrl)
	header.Write(c, header.ChainID, config.SonrChainId)
	header.Write(c, header.SonrAPIURL, config.SonrApiUrl)
	header.Write(c, header.SonrRPCURL, config.SonrRpcUrl)
	cookie.Write(c, cookie.SonrAddress, config.MotrAddress)
	cookie.Write(c, cookie.SonrKeyshare, config.MotrKeyshare)
	schemaBz, err := json.Marshal(config.VaultSchema)
	if err != nil {
		return err
	}
	cookie.WriteBytes(c, cookie.VaultSchema, schemaBz)
	return nil
}
// injectSession tags the connection with the peer role, ensures the session
// ID and challenge cookies exist, and wraps c in an *HTTPContext. Cookie
// failures are logged but never block the request.
func injectSession(c echo.Context, role common.PeerRole) *HTTPContext {
	if c == nil {
		return initHTTPContext(nil)
	}
	cookie.Write(c, cookie.SessionRole, role.String())
	// Best-effort: log failures and continue. (The original had empty if
	// bodies here, silently discarding both errors despite the comments.)
	if err := loadOrGenKsuid(c); err != nil {
		c.Logger().Error(err)
	}
	if err := loadOrGenChallenge(c); err != nil {
		c.Logger().Error(err)
	}
	return initHTTPContext(c)
}

View File

@ -0,0 +1,84 @@
package session
import (
"time"
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/common"
"github.com/onsonr/sonr/pkg/common/middleware/cookie"
"github.com/onsonr/sonr/pkg/common/types"
)
// HTTPContext is the context for HTTP endpoints. It wraps echo.Context,
// records the peer role read from the session-role cookie, and caches the
// per-request session data.
type HTTPContext struct {
	echo.Context
	role        common.PeerRole
	sessionData *types.Session
}

// Ensure HTTPContext implements context.Context by delegating the four
// context methods to the underlying request's context.

// Deadline reports the request context's deadline, if any.
func (s *HTTPContext) Deadline() (deadline time.Time, ok bool) {
	return s.Context.Request().Context().Deadline()
}

// Done returns the request context's cancellation channel.
func (s *HTTPContext) Done() <-chan struct{} {
	return s.Context.Request().Context().Done()
}

// Err returns the request context's error, if it was canceled or expired.
func (s *HTTPContext) Err() error {
	return s.Context.Request().Context().Err()
}

// Value returns the value stored on the request context for key.
func (s *HTTPContext) Value(key interface{}) interface{} {
	return s.Context.Request().Context().Value(key)
}
// initHTTPContext wraps c in an *HTTPContext, extracting session data from
// the request cookies/headers and storing it back on the request's
// context.Context so GetData can find it either way. A nil echo context
// yields a bare HTTPContext with empty session data.
func initHTTPContext(c echo.Context) *HTTPContext {
	if c == nil {
		return &HTTPContext{sessionData: &types.Session{}}
	}
	data := injectSessionData(c)
	if data == nil {
		data = &types.Session{}
	}
	// Make the session data reachable through the request's context.Context.
	c.SetRequest(c.Request().WithContext(WithData(c.Request().Context(), data)))
	return &HTTPContext{
		Context:     c,
		role:        common.PeerRole(cookie.ReadUnsafe(c, cookie.SessionRole)),
		sessionData: data,
	}
}
// ID returns the session ID from the cached session data.
func (s *HTTPContext) ID() string {
	return s.GetData().Id
}

// LoginOptions builds WebAuthn assertion options for the given allowed
// credentials using the session challenge and a 10s timeout.
// NOTE(review): the Base64Decode error is discarded — a corrupt challenge
// cookie yields empty challenge bytes here; confirm callers tolerate that.
func (s *HTTPContext) LoginOptions(credentials []common.CredDescriptor) *common.LoginOptions {
	ch, _ := common.Base64Decode(s.GetData().Challenge)
	return &common.LoginOptions{
		Challenge:          ch,
		Timeout:            10000,
		AllowedCredentials: credentials,
	}
}

// RegisterOptions builds WebAuthn creation options for subject by applying
// the session challenge and user entity on top of the package defaults.
func (s *HTTPContext) RegisterOptions(subject string) *common.RegisterOptions {
	ch, _ := common.Base64Decode(s.GetData().Challenge)
	opts := baseRegisterOptions()
	opts.Challenge = ch
	opts.User = buildUserEntity(subject)
	return opts
}

// GetData returns the cached session data for this request.
func (s *HTTPContext) GetData() *types.Session {
	return s.sessionData
}

View File

@ -0,0 +1,188 @@
package session
import (
"regexp"
"strings"
"github.com/go-webauthn/webauthn/protocol"
"github.com/go-webauthn/webauthn/protocol/webauthncose"
"github.com/labstack/echo/v4"
"github.com/segmentio/ksuid"
"github.com/onsonr/sonr/pkg/common"
"github.com/onsonr/sonr/pkg/common/middleware/cookie"
"github.com/onsonr/sonr/pkg/common/middleware/header"
"github.com/onsonr/sonr/pkg/common/types"
)
const kWebAuthnTimeout = 6000
// ╭───────────────────────────────────────────────────────────╮
// │ Initialization │
// ╰───────────────────────────────────────────────────────────╯
// loadOrGenChallenge guarantees a parseable WebAuthn challenge cookie:
// a fresh challenge is minted and stored when the cookie is absent, and
// whatever ends up stored is validated by round-tripping it through
// URLEncodedBase64.UnmarshalJSON.
func loadOrGenChallenge(c echo.Context) error {
	newChallenge := func() []byte {
		ch, _ := protocol.CreateChallenge()
		bz, _ := ch.MarshalJSON()
		return bz
	}

	var raw []byte
	if cookie.Exists(c, cookie.SessionChallenge) {
		var err error
		if raw, err = cookie.ReadBytes(c, cookie.SessionChallenge); err != nil {
			return err
		}
	} else {
		raw = newChallenge()
		cookie.WriteBytes(c, cookie.SessionChallenge, raw)
	}

	// Confirm the stored bytes decode as a challenge.
	var chal protocol.URLEncodedBase64
	return chal.UnmarshalJSON(raw)
}
// loadOrGenKsuid guarantees a session ID cookie: an existing readable value
// is reused, otherwise a fresh KSUID is minted. The value is (re)written in
// either case. Always returns nil.
func loadOrGenKsuid(c echo.Context) error {
	id := ""
	if cookie.Exists(c, cookie.SessionID) {
		if v, err := cookie.Read(c, cookie.SessionID); err == nil {
			id = v
		}
	}
	if id == "" {
		id = ksuid.New().String()
	}
	cookie.Write(c, cookie.SessionID, id)
	return nil
}
// ╭───────────────────────────────────────────────────────────╮
// │ Extraction │
// ╰───────────────────────────────────────────────────────────╯
// injectSessionData assembles a fresh types.Session from the session
// cookies (ID and challenge) and the Sec-CH-UA client-hint headers.
func injectSessionData(c echo.Context) *types.Session {
	id, chal := extractPeerInfo(c)
	bn, bv := extractBrowserInfo(c)
	return &types.Session{
		Id:               id,
		Challenge:        chal,
		BrowserName:      bn,
		BrowserVersion:   bv,
		UserArchitecture: header.Read(c, header.Architecture),
		Platform:         header.Read(c, header.Platform),
		PlatformVersion:  header.Read(c, header.PlatformVersion),
		DeviceModel:      header.Read(c, header.Model),
		// "?1" is the structured-field encoding of boolean true.
		IsMobile: header.Equals(c, header.Mobile, "?1"),
	}
}
// extractPeerInfo reads the session ID and challenge cookies, returning the
// ID and the base64-encoded challenge. Errors are deliberately ignored:
// missing or unreadable cookies simply yield empty values.
func extractPeerInfo(c echo.Context) (string, string) {
	var chal protocol.URLEncodedBase64
	id, _ := cookie.Read(c, cookie.SessionID)
	chalRaw, _ := cookie.ReadBytes(c, cookie.SessionChallenge)
	chal.UnmarshalJSON(chalRaw)
	return id, common.Base64Encode(chal)
}
// browserEntryRe matches one Sec-CH-UA list entry, e.g. `"Chromium";v="120"`,
// capturing the brand name and version. Compiled once at package scope —
// the original recompiled it on every loop iteration.
var browserEntryRe = regexp.MustCompile(`"([^"]+)";v="([^"]+)"`)

// extractBrowserInfo parses the Sec-CH-UA header into a browser name and
// version. It returns ("N/A", "-1") when the header is absent or contains
// no acceptable brand (the original inconsistently returned ("", "") in the
// latter case). When several acceptable brands are listed, the last one
// wins — the original comment claimed "first" but the loop kept overwriting.
func extractBrowserInfo(c echo.Context) (string, string) {
	secCHUA := header.Read(c, header.UserAgent)
	if secCHUA == "" {
		return "N/A", "-1"
	}
	name, ver := "", ""
	// Split the header into individual browser entries.
	for _, entry := range strings.Split(strings.TrimSpace(secCHUA), ",") {
		matches := browserEntryRe.FindStringSubmatch(strings.TrimSpace(entry))
		if len(matches) != 3 {
			continue
		}
		// Skip brands filtered out by validBrowser (e.g. "Not A;Brand").
		if !validBrowser(matches[1]) {
			continue
		}
		name, ver = matches[1], matches[2]
	}
	if name == "" {
		// Header present but no acceptable brand: mirror the absent-header case.
		return "N/A", "-1"
	}
	return name, ver
}
// validBrowser reports whether name is an acceptable browser brand — i.e.
// not one of the two excluded brands (the unknown placeholder and the
// generic Chromium entry) defined in the common package.
func validBrowser(name string) bool {
	return name != common.BrowserNameUnknown.String() && name != common.BrowserNameChromium.String()
}
// ╭───────────────────────────────────────────────────────────╮
// │ Authentication │
// ╰───────────────────────────────────────────────────────────╯
// buildUserEntity wraps userID in a WebAuthn user entity; only the ID field
// is populated.
func buildUserEntity(userID string) protocol.UserEntity {
	return protocol.UserEntity{
		ID: userID,
	}
}

// baseRegisterOptions returns the base options for registering a new user
// without challenge or user entity (the caller fills those in). Defaults:
// platform authenticator, direct attestation preferred, resident key and
// user verification "preferred", and ES256 / ES256K / EdDSA as the accepted
// credential algorithms.
func baseRegisterOptions() *common.RegisterOptions {
	return &protocol.PublicKeyCredentialCreationOptions{
		Timeout:     kWebAuthnTimeout,
		Attestation: protocol.PreferDirectAttestation,
		AuthenticatorSelection: protocol.AuthenticatorSelection{
			AuthenticatorAttachment: "platform",
			ResidentKey:             protocol.ResidentKeyRequirementPreferred,
			UserVerification:        "preferred",
		},
		Parameters: []protocol.CredentialParameter{
			{
				Type:      "public-key",
				Algorithm: webauthncose.AlgES256,
			},
			{
				Type:      "public-key",
				Algorithm: webauthncose.AlgES256K,
			},
			{
				Type:      "public-key",
				Algorithm: webauthncose.AlgEdDSA,
			},
		},
	}
}

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type Account struct { type Account struct {
Id string `pkl:"id" json:"id,omitempty" query:"id"` Id string `pkl:"id" json:"id,omitempty" query:"id"`

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type Asset struct { type Asset struct {
Id string `pkl:"id" json:"id,omitempty" query:"id"` Id string `pkl:"id" json:"id,omitempty" query:"id"`

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type Chain struct { type Chain struct {
Id string `pkl:"id" json:"id,omitempty" query:"id"` Id string `pkl:"id" json:"id,omitempty" query:"id"`

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type Credential struct { type Credential struct {
Id string `pkl:"id" json:"id,omitempty" query:"id"` Id string `pkl:"id" json:"id,omitempty" query:"id"`

View File

@ -1,12 +1,12 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
import ( import (
"github.com/onsonr/sonr/pkg/motr/types/orm/keyalgorithm" "github.com/onsonr/sonr/pkg/common/models/keyalgorithm"
"github.com/onsonr/sonr/pkg/motr/types/orm/keycurve" "github.com/onsonr/sonr/pkg/common/models/keycurve"
"github.com/onsonr/sonr/pkg/motr/types/orm/keyencoding" "github.com/onsonr/sonr/pkg/common/models/keyencoding"
"github.com/onsonr/sonr/pkg/motr/types/orm/keyrole" "github.com/onsonr/sonr/pkg/common/models/keyrole"
"github.com/onsonr/sonr/pkg/motr/types/orm/keytype" "github.com/onsonr/sonr/pkg/common/models/keytype"
) )
type DID struct { type DID struct {

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type Grant struct { type Grant struct {
Id uint `pkl:"id" json:"id,omitempty" query:"id"` Id uint `pkl:"id" json:"id,omitempty" query:"id"`

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type JWK struct { type JWK struct {
Kty string `pkl:"kty" json:"kty,omitempty"` Kty string `pkl:"kty" json:"kty,omitempty"`

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type Keyshare struct { type Keyshare struct {
Id string `pkl:"id" json:"id,omitempty" query:"id"` Id string `pkl:"id" json:"id,omitempty" query:"id"`

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
import ( import (
"context" "context"
@ -7,14 +7,14 @@ import (
"github.com/apple/pkl-go/pkl" "github.com/apple/pkl-go/pkl"
) )
type Orm struct { type ORM struct {
DbName string `pkl:"db_name"` DbName string `pkl:"db_name"`
DbVersion int `pkl:"db_version"` DbVersion int `pkl:"db_version"`
} }
// LoadFromPath loads the pkl module at the given path and evaluates it into a Orm // LoadFromPath loads the pkl module at the given path and evaluates it into a ORM
func LoadFromPath(ctx context.Context, path string) (ret *Orm, err error) { func LoadFromPath(ctx context.Context, path string) (ret *ORM, err error) {
evaluator, err := pkl.NewEvaluator(ctx, pkl.PreconfiguredOptions) evaluator, err := pkl.NewEvaluator(ctx, pkl.PreconfiguredOptions)
if err != nil { if err != nil {
return nil, err return nil, err
@ -29,9 +29,9 @@ func LoadFromPath(ctx context.Context, path string) (ret *Orm, err error) {
return ret, err return ret, err
} }
// Load loads the pkl module at the given source and evaluates it with the given evaluator into a Orm // Load loads the pkl module at the given source and evaluates it with the given evaluator into a ORM
func Load(ctx context.Context, evaluator pkl.Evaluator, source *pkl.ModuleSource) (*Orm, error) { func Load(ctx context.Context, evaluator pkl.Evaluator, source *pkl.ModuleSource) (*ORM, error) {
var ret Orm var ret ORM
if err := evaluator.EvaluateModule(ctx, source, &ret); err != nil { if err := evaluator.EvaluateModule(ctx, source, &ret); err != nil {
return nil, err return nil, err
} }

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package orm package models
type Profile struct { type Profile struct {
Id string `pkl:"id" json:"id,omitempty" query:"id"` Id string `pkl:"id" json:"id,omitempty" query:"id"`

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package assettype package assettype
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package didmethod package didmethod
import ( import (

View File

@ -0,0 +1,17 @@
// Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package models
import "github.com/apple/pkl-go/pkl"
func init() {
pkl.RegisterMapping("common.types.ORM", ORM{})
pkl.RegisterMapping("common.types.ORM#Account", Account{})
pkl.RegisterMapping("common.types.ORM#Asset", Asset{})
pkl.RegisterMapping("common.types.ORM#Chain", Chain{})
pkl.RegisterMapping("common.types.ORM#Credential", Credential{})
pkl.RegisterMapping("common.types.ORM#DID", DID{})
pkl.RegisterMapping("common.types.ORM#JWK", JWK{})
pkl.RegisterMapping("common.types.ORM#Grant", Grant{})
pkl.RegisterMapping("common.types.ORM#Keyshare", Keyshare{})
pkl.RegisterMapping("common.types.ORM#Profile", Profile{})
}

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package keyalgorithm package keyalgorithm
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package keycurve package keycurve
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package keyencoding package keyencoding
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package keyrole package keyrole
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package keysharerole package keysharerole
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package keytype package keytype
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package permissiongrant package permissiongrant
import ( import (

View File

@ -1,4 +1,4 @@
// Code generated from Pkl module `orm`. DO NOT EDIT. // Code generated from Pkl module `common.types.ORM`. DO NOT EDIT.
package permissionscope package permissionscope
import ( import (

View File

@ -1,4 +1,4 @@
package orm package common
import ( import (
"reflect" "reflect"

69
pkg/common/session.go Normal file
View File

@ -0,0 +1,69 @@
package common
import (
"encoding/base64"
"net/http"
"github.com/go-webauthn/webauthn/protocol"
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/common/types"
)
// Shared HTTP errors returned by session and credential handlers.
var (
	ErrInvalidCredentials = echo.NewHTTPError(http.StatusUnauthorized, "Invalid credentials")
	ErrInvalidSubject     = echo.NewHTTPError(http.StatusBadRequest, "Invalid subject")
	ErrInvalidUser        = echo.NewHTTPError(http.StatusBadRequest, "Invalid user")
	ErrUserAlreadyExists  = echo.NewHTTPError(http.StatusConflict, "User already exists")
	ErrUserNotFound       = echo.NewHTTPError(http.StatusNotFound, "User not found")
)
// SessionCtx is the interface implemented by the request-scoped session.
// It exposes the session identity, WebAuthn ceremony options, and the
// underlying persisted session record.
type SessionCtx interface {
	// ID returns the unique identifier of this session.
	ID() string
	// LoginOptions builds WebAuthn request (login/assertion) options for
	// the given allowed credentials.
	LoginOptions(credentials []CredDescriptor) *LoginOptions
	// RegisterOptions builds WebAuthn creation (registration) options for
	// the given subject.
	RegisterOptions(subject string) *RegisterOptions
	// GetData returns the raw session record backing this context.
	GetData() *types.Session
}

// Aliases for the go-webauthn protocol types used in ceremony options.
type (
	CredDescriptor  = protocol.CredentialDescriptor
	LoginOptions    = protocol.PublicKeyCredentialRequestOptions
	RegisterOptions = protocol.PublicKeyCredentialCreationOptions
)
// BrowserName identifies a browser family as reported by the client.
type BrowserName string

const (
	// BrowserNameUnknown is the placeholder brand string sent for
	// unrecognized clients.
	BrowserNameUnknown BrowserName = " Not A;Brand"
	// BrowserNameChromium marks Chromium-based browsers.
	BrowserNameChromium BrowserName = "Chromium"
)

// String returns the browser name as a plain string.
func (b BrowserName) String() string {
	return string(b)
}
// PeerRole distinguishes the role a peer plays in the network.
type PeerRole string

const (
	RoleUnknown PeerRole = "none" // role not yet determined
	RoleHway    PeerRole = "hway" // hway (gateway) peer
	RoleMotr    PeerRole = "motr" // motr peer — presumably the vault/worker side; confirm
)

// Is reports whether the receiver equals the given role.
func (p PeerRole) Is(role PeerRole) bool {
	return p == role
}

// String returns the role as a plain string.
func (p PeerRole) String() string {
	return string(p)
}
// Base64Encode encodes data as unpadded, URL-safe base64.
func Base64Encode(data []byte) string {
	enc := base64.RawURLEncoding
	return enc.EncodeToString(data)
}
// Base64Decode decodes an unpadded, URL-safe base64 string such as the
// ones produced by Base64Encode. Malformed input yields an error.
func Base64Decode(data string) ([]byte, error) {
	dec := base64.RawURLEncoding
	return dec.DecodeString(data)
}

View File

@ -0,0 +1,36 @@
// Code generated from Pkl module `common.types.Ctx`. DO NOT EDIT.
package types
import (
"context"
"github.com/apple/pkl-go/pkl"
)
// Ctx is the root object of the `common.types.Ctx` Pkl module. It has no
// fields of its own; the module's nested types (e.g. Session) carry data.
type Ctx struct {
}
// LoadFromPath loads the pkl module at the given path and evaluates it into a Ctx
func LoadFromPath(ctx context.Context, path string) (ret *Ctx, err error) {
	evaluator, err := pkl.NewEvaluator(ctx, pkl.PreconfiguredOptions)
	if err != nil {
		return nil, err
	}
	// Close the evaluator on exit; the named return lets the defer surface
	// the close error only when the load itself succeeded.
	defer func() {
		cerr := evaluator.Close()
		if err == nil {
			err = cerr
		}
	}()
	ret, err = Load(ctx, evaluator, pkl.FileSource(path))
	return ret, err
}
// Load loads the pkl module at the given source and evaluates it with the given evaluator into a Ctx
func Load(ctx context.Context, evaluator pkl.Evaluator, source *pkl.ModuleSource) (*Ctx, error) {
	// Evaluate directly into a stack value; a pointer to it is returned
	// only on success.
	var ret Ctx
	if err := evaluator.EvaluateModule(ctx, source, &ret); err != nil {
		return nil, err
	}
	return &ret, nil
}

View File

@ -0,0 +1,24 @@
// Code generated from Pkl module `common.types.Ctx`. DO NOT EDIT.
package types
// Session is the per-request session record of the `common.types.Ctx`
// Pkl module. The Browser*/Platform*/Device* fields carry client
// device/user-agent details (presumably from client hints — the producer
// is not visible here).
type Session struct {
	// Id uniquely identifies the session.
	Id string `pkl:"id" json:"id,omitempty"`
	// Challenge is an opaque challenge string — likely for a WebAuthn
	// ceremony; confirm against the session middleware.
	Challenge        string `pkl:"challenge" json:"challenge,omitempty"`
	BrowserName      string `pkl:"browserName" json:"browserName,omitempty"`
	BrowserVersion   string `pkl:"browserVersion" json:"browserVersion,omitempty"`
	UserArchitecture string `pkl:"userArchitecture" json:"userArchitecture,omitempty"`
	Platform         string `pkl:"platform" json:"platform,omitempty"`
	PlatformVersion  string `pkl:"platformVersion" json:"platformVersion,omitempty"`
	DeviceModel      string `pkl:"deviceModel" json:"deviceModel,omitempty"`
	IsMobile         bool   `pkl:"isMobile" json:"isMobile,omitempty"`
	// VaultAddress is the address of the user's vault, when assigned.
	VaultAddress string `pkl:"vaultAddress" json:"vaultAddress,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Code generated from Pkl module `common.types.Ctx`. DO NOT EDIT.
package types
import "github.com/apple/pkl-go/pkl"
// init registers the module root and its nested Session type with the
// pkl-go runtime so evaluated Pkl values decode into these Go structs.
func init() {
	pkl.RegisterMapping("common.types.Ctx", Ctx{})
	pkl.RegisterMapping("common.types.Ctx#Session", Session{})
}

View File

@ -1,64 +0,0 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc (unknown)
// source: common/v1/ipfs.proto
package commonv1
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
var File_common_v1_ipfs_proto protoreflect.FileDescriptor
var file_common_v1_ipfs_proto_rawDesc = []byte{
0x0a, 0x14, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x2f, 0x69, 0x70, 0x66, 0x73,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76,
0x31, 0x42, 0x32, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f,
0x6f, 0x6e, 0x73, 0x6f, 0x6e, 0x72, 0x2f, 0x73, 0x6f, 0x6e, 0x72, 0x2f, 0x70, 0x6b, 0x67, 0x2f,
0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x3b, 0x63, 0x6f, 0x6d,
0x6d, 0x6f, 0x6e, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var file_common_v1_ipfs_proto_goTypes = []interface{}{}
var file_common_v1_ipfs_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_common_v1_ipfs_proto_init() }
func file_common_v1_ipfs_proto_init() {
if File_common_v1_ipfs_proto != nil {
return
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_common_v1_ipfs_proto_rawDesc,
NumEnums: 0,
NumMessages: 0,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_common_v1_ipfs_proto_goTypes,
DependencyIndexes: file_common_v1_ipfs_proto_depIdxs,
}.Build()
File_common_v1_ipfs_proto = out.File
file_common_v1_ipfs_proto_rawDesc = nil
file_common_v1_ipfs_proto_goTypes = nil
file_common_v1_ipfs_proto_depIdxs = nil
}

View File

@ -1,377 +0,0 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc (unknown)
// source: common/v1/keys.proto
package commonv1
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// PubKey defines a public key for a did
type PubKey struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Role string `protobuf:"bytes,1,opt,name=role,proto3" json:"role,omitempty"`
KeyType string `protobuf:"bytes,2,opt,name=key_type,json=keyType,proto3" json:"key_type,omitempty"`
RawKey *RawKey `protobuf:"bytes,3,opt,name=raw_key,json=rawKey,proto3" json:"raw_key,omitempty"`
Jwk *JSONWebKey `protobuf:"bytes,4,opt,name=jwk,proto3" json:"jwk,omitempty"`
}
func (x *PubKey) Reset() {
*x = PubKey{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_keys_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *PubKey) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*PubKey) ProtoMessage() {}
func (x *PubKey) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_keys_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use PubKey.ProtoReflect.Descriptor instead.
func (*PubKey) Descriptor() ([]byte, []int) {
return file_common_v1_keys_proto_rawDescGZIP(), []int{0}
}
func (x *PubKey) GetRole() string {
if x != nil {
return x.Role
}
return ""
}
func (x *PubKey) GetKeyType() string {
if x != nil {
return x.KeyType
}
return ""
}
func (x *PubKey) GetRawKey() *RawKey {
if x != nil {
return x.RawKey
}
return nil
}
func (x *PubKey) GetJwk() *JSONWebKey {
if x != nil {
return x.Jwk
}
return nil
}
// JWK represents a JSON Web Key
type JSONWebKey struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Kty string `protobuf:"bytes,1,opt,name=kty,proto3" json:"kty,omitempty"` // Key Type
Crv string `protobuf:"bytes,2,opt,name=crv,proto3" json:"crv,omitempty"` // Curve (for EC and OKP keys)
X string `protobuf:"bytes,3,opt,name=x,proto3" json:"x,omitempty"` // X coordinate (for EC and OKP keys)
Y string `protobuf:"bytes,4,opt,name=y,proto3" json:"y,omitempty"` // Y coordinate (for EC keys)
N string `protobuf:"bytes,5,opt,name=n,proto3" json:"n,omitempty"` // Modulus (for RSA keys)
E string `protobuf:"bytes,6,opt,name=e,proto3" json:"e,omitempty"` // Exponent (for RSA keys)
}
func (x *JSONWebKey) Reset() {
*x = JSONWebKey{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_keys_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *JSONWebKey) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*JSONWebKey) ProtoMessage() {}
func (x *JSONWebKey) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_keys_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use JSONWebKey.ProtoReflect.Descriptor instead.
func (*JSONWebKey) Descriptor() ([]byte, []int) {
return file_common_v1_keys_proto_rawDescGZIP(), []int{1}
}
func (x *JSONWebKey) GetKty() string {
if x != nil {
return x.Kty
}
return ""
}
func (x *JSONWebKey) GetCrv() string {
if x != nil {
return x.Crv
}
return ""
}
func (x *JSONWebKey) GetX() string {
if x != nil {
return x.X
}
return ""
}
func (x *JSONWebKey) GetY() string {
if x != nil {
return x.Y
}
return ""
}
func (x *JSONWebKey) GetN() string {
if x != nil {
return x.N
}
return ""
}
func (x *JSONWebKey) GetE() string {
if x != nil {
return x.E
}
return ""
}
type RawKey struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Algorithm string `protobuf:"bytes,1,opt,name=algorithm,proto3" json:"algorithm,omitempty"`
Encoding string `protobuf:"bytes,2,opt,name=encoding,proto3" json:"encoding,omitempty"`
Curve string `protobuf:"bytes,3,opt,name=curve,proto3" json:"curve,omitempty"`
Key []byte `protobuf:"bytes,4,opt,name=key,proto3" json:"key,omitempty"`
}
func (x *RawKey) Reset() {
*x = RawKey{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_keys_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *RawKey) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*RawKey) ProtoMessage() {}
func (x *RawKey) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_keys_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use RawKey.ProtoReflect.Descriptor instead.
func (*RawKey) Descriptor() ([]byte, []int) {
return file_common_v1_keys_proto_rawDescGZIP(), []int{2}
}
func (x *RawKey) GetAlgorithm() string {
if x != nil {
return x.Algorithm
}
return ""
}
func (x *RawKey) GetEncoding() string {
if x != nil {
return x.Encoding
}
return ""
}
func (x *RawKey) GetCurve() string {
if x != nil {
return x.Curve
}
return ""
}
func (x *RawKey) GetKey() []byte {
if x != nil {
return x.Key
}
return nil
}
var File_common_v1_keys_proto protoreflect.FileDescriptor
var file_common_v1_keys_proto_rawDesc = []byte{
0x0a, 0x14, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x2f, 0x6b, 0x65, 0x79, 0x73,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76,
0x31, 0x22, 0x8c, 0x01, 0x0a, 0x06, 0x50, 0x75, 0x62, 0x4b, 0x65, 0x79, 0x12, 0x12, 0x0a, 0x04,
0x72, 0x6f, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x72, 0x6f, 0x6c, 0x65,
0x12, 0x19, 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01,
0x28, 0x09, 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x2a, 0x0a, 0x07, 0x72,
0x61, 0x77, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x63,
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x61, 0x77, 0x4b, 0x65, 0x79, 0x52,
0x06, 0x72, 0x61, 0x77, 0x4b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x03, 0x6a, 0x77, 0x6b, 0x18, 0x04,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31,
0x2e, 0x4a, 0x53, 0x4f, 0x4e, 0x57, 0x65, 0x62, 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6a, 0x77, 0x6b,
0x22, 0x68, 0x0a, 0x0a, 0x4a, 0x53, 0x4f, 0x4e, 0x57, 0x65, 0x62, 0x4b, 0x65, 0x79, 0x12, 0x10,
0x0a, 0x03, 0x6b, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x74, 0x79,
0x12, 0x10, 0x0a, 0x03, 0x63, 0x72, 0x76, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x63,
0x72, 0x76, 0x12, 0x0c, 0x0a, 0x01, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x01, 0x78,
0x12, 0x0c, 0x0a, 0x01, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x01, 0x79, 0x12, 0x0c,
0x0a, 0x01, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x01, 0x6e, 0x12, 0x0c, 0x0a, 0x01,
0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x01, 0x65, 0x22, 0x6a, 0x0a, 0x06, 0x52, 0x61,
0x77, 0x4b, 0x65, 0x79, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68,
0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x61, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74,
0x68, 0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x18, 0x02,
0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x14,
0x0a, 0x05, 0x63, 0x75, 0x72, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x63,
0x75, 0x72, 0x76, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28,
0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x42, 0x32, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x73, 0x6f, 0x6e, 0x72, 0x2f, 0x73, 0x6f, 0x6e, 0x72,
0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x74, 0x79, 0x70, 0x65,
0x73, 0x3b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
}
var (
file_common_v1_keys_proto_rawDescOnce sync.Once
file_common_v1_keys_proto_rawDescData = file_common_v1_keys_proto_rawDesc
)
func file_common_v1_keys_proto_rawDescGZIP() []byte {
file_common_v1_keys_proto_rawDescOnce.Do(func() {
file_common_v1_keys_proto_rawDescData = protoimpl.X.CompressGZIP(file_common_v1_keys_proto_rawDescData)
})
return file_common_v1_keys_proto_rawDescData
}
var file_common_v1_keys_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
var file_common_v1_keys_proto_goTypes = []interface{}{
(*PubKey)(nil), // 0: common.v1.PubKey
(*JSONWebKey)(nil), // 1: common.v1.JSONWebKey
(*RawKey)(nil), // 2: common.v1.RawKey
}
var file_common_v1_keys_proto_depIdxs = []int32{
2, // 0: common.v1.PubKey.raw_key:type_name -> common.v1.RawKey
1, // 1: common.v1.PubKey.jwk:type_name -> common.v1.JSONWebKey
2, // [2:2] is the sub-list for method output_type
2, // [2:2] is the sub-list for method input_type
2, // [2:2] is the sub-list for extension type_name
2, // [2:2] is the sub-list for extension extendee
0, // [0:2] is the sub-list for field type_name
}
func init() { file_common_v1_keys_proto_init() }
func file_common_v1_keys_proto_init() {
if File_common_v1_keys_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_common_v1_keys_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PubKey); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_v1_keys_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*JSONWebKey); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_v1_keys_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*RawKey); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_common_v1_keys_proto_rawDesc,
NumEnums: 0,
NumMessages: 3,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_common_v1_keys_proto_goTypes,
DependencyIndexes: file_common_v1_keys_proto_depIdxs,
MessageInfos: file_common_v1_keys_proto_msgTypes,
}.Build()
File_common_v1_keys_proto = out.File
file_common_v1_keys_proto_rawDesc = nil
file_common_v1_keys_proto_goTypes = nil
file_common_v1_keys_proto_depIdxs = nil
}

View File

@ -1,215 +0,0 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc (unknown)
// source: common/v1/uri.proto
package commonv1
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type URI_URIProtocol int32
const (
URI_HTTPS URI_URIProtocol = 0
URI_IPFS URI_URIProtocol = 1
URI_IPNS URI_URIProtocol = 2
URI_DID URI_URIProtocol = 3
)
// Enum value maps for URI_URIProtocol.
var (
URI_URIProtocol_name = map[int32]string{
0: "HTTPS",
1: "IPFS",
2: "IPNS",
3: "DID",
}
URI_URIProtocol_value = map[string]int32{
"HTTPS": 0,
"IPFS": 1,
"IPNS": 2,
"DID": 3,
}
)
func (x URI_URIProtocol) Enum() *URI_URIProtocol {
p := new(URI_URIProtocol)
*p = x
return p
}
func (x URI_URIProtocol) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (URI_URIProtocol) Descriptor() protoreflect.EnumDescriptor {
return file_common_v1_uri_proto_enumTypes[0].Descriptor()
}
func (URI_URIProtocol) Type() protoreflect.EnumType {
return &file_common_v1_uri_proto_enumTypes[0]
}
func (x URI_URIProtocol) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use URI_URIProtocol.Descriptor instead.
func (URI_URIProtocol) EnumDescriptor() ([]byte, []int) {
return file_common_v1_uri_proto_rawDescGZIP(), []int{0, 0}
}
type URI struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Protocol URI_URIProtocol `protobuf:"varint,1,opt,name=protocol,proto3,enum=common.v1.URI_URIProtocol" json:"protocol,omitempty"`
Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *URI) Reset() {
*x = URI{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_uri_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *URI) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*URI) ProtoMessage() {}
func (x *URI) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_uri_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use URI.ProtoReflect.Descriptor instead.
func (*URI) Descriptor() ([]byte, []int) {
return file_common_v1_uri_proto_rawDescGZIP(), []int{0}
}
func (x *URI) GetProtocol() URI_URIProtocol {
if x != nil {
return x.Protocol
}
return URI_HTTPS
}
func (x *URI) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
var File_common_v1_uri_proto protoreflect.FileDescriptor
var file_common_v1_uri_proto_rawDesc = []byte{
0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x2f, 0x75, 0x72, 0x69, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31,
0x22, 0x8a, 0x01, 0x0a, 0x03, 0x55, 0x52, 0x49, 0x12, 0x36, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x63, 0x6f, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x63, 0x6f, 0x6d,
0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x52, 0x49, 0x2e, 0x55, 0x52, 0x49, 0x50, 0x72,
0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c,
0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x35, 0x0a, 0x0b, 0x55, 0x52, 0x49, 0x50, 0x72, 0x6f,
0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x48, 0x54, 0x54, 0x50, 0x53, 0x10, 0x00,
0x12, 0x08, 0x0a, 0x04, 0x49, 0x50, 0x46, 0x53, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x50,
0x4e, 0x53, 0x10, 0x02, 0x12, 0x07, 0x0a, 0x03, 0x44, 0x49, 0x44, 0x10, 0x03, 0x42, 0x32, 0x5a,
0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x73, 0x6f,
0x6e, 0x72, 0x2f, 0x73, 0x6f, 0x6e, 0x72, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x6f, 0x6d, 0x6d,
0x6f, 0x6e, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x3b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x76,
0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_common_v1_uri_proto_rawDescOnce sync.Once
file_common_v1_uri_proto_rawDescData = file_common_v1_uri_proto_rawDesc
)
func file_common_v1_uri_proto_rawDescGZIP() []byte {
file_common_v1_uri_proto_rawDescOnce.Do(func() {
file_common_v1_uri_proto_rawDescData = protoimpl.X.CompressGZIP(file_common_v1_uri_proto_rawDescData)
})
return file_common_v1_uri_proto_rawDescData
}
var file_common_v1_uri_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_common_v1_uri_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_common_v1_uri_proto_goTypes = []interface{}{
(URI_URIProtocol)(0), // 0: common.v1.URI.URIProtocol
(*URI)(nil), // 1: common.v1.URI
}
var file_common_v1_uri_proto_depIdxs = []int32{
0, // 0: common.v1.URI.protocol:type_name -> common.v1.URI.URIProtocol
1, // [1:1] is the sub-list for method output_type
1, // [1:1] is the sub-list for method input_type
1, // [1:1] is the sub-list for extension type_name
1, // [1:1] is the sub-list for extension extendee
0, // [0:1] is the sub-list for field type_name
}
func init() { file_common_v1_uri_proto_init() }
func file_common_v1_uri_proto_init() {
if File_common_v1_uri_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_common_v1_uri_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*URI); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_common_v1_uri_proto_rawDesc,
NumEnums: 1,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_common_v1_uri_proto_goTypes,
DependencyIndexes: file_common_v1_uri_proto_depIdxs,
EnumInfos: file_common_v1_uri_proto_enumTypes,
MessageInfos: file_common_v1_uri_proto_msgTypes,
}.Build()
File_common_v1_uri_proto = out.File
file_common_v1_uri_proto_rawDesc = nil
file_common_v1_uri_proto_goTypes = nil
file_common_v1_uri_proto_depIdxs = nil
}

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `dwn`. DO NOT EDIT. // Code generated from Pkl module `common.types.DWN`. DO NOT EDIT.
package config package dwn
type Config struct { type Config struct {
IpfsGatewayUrl string `pkl:"ipfsGatewayUrl" json:"ipfsGatewayUrl,omitempty"` IpfsGatewayUrl string `pkl:"ipfsGatewayUrl" json:"ipfsGatewayUrl,omitempty"`

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `dwn`. DO NOT EDIT. // Code generated from Pkl module `common.types.DWN`. DO NOT EDIT.
package config package dwn
import ( import (
"context" "context"
@ -7,11 +7,11 @@ import (
"github.com/apple/pkl-go/pkl" "github.com/apple/pkl-go/pkl"
) )
type Dwn struct { type DWN struct {
} }
// LoadFromPath loads the pkl module at the given path and evaluates it into a Dwn // LoadFromPath loads the pkl module at the given path and evaluates it into a DWN
func LoadFromPath(ctx context.Context, path string) (ret *Dwn, err error) { func LoadFromPath(ctx context.Context, path string) (ret *DWN, err error) {
evaluator, err := pkl.NewEvaluator(ctx, pkl.PreconfiguredOptions) evaluator, err := pkl.NewEvaluator(ctx, pkl.PreconfiguredOptions)
if err != nil { if err != nil {
return nil, err return nil, err
@ -26,9 +26,9 @@ func LoadFromPath(ctx context.Context, path string) (ret *Dwn, err error) {
return ret, err return ret, err
} }
// Load loads the pkl module at the given source and evaluates it with the given evaluator into a Dwn // Load loads the pkl module at the given source and evaluates it with the given evaluator into a DWN
func Load(ctx context.Context, evaluator pkl.Evaluator, source *pkl.ModuleSource) (*Dwn, error) { func Load(ctx context.Context, evaluator pkl.Evaluator, source *pkl.ModuleSource) (*DWN, error) {
var ret Dwn var ret DWN
if err := evaluator.EvaluateModule(ctx, source, &ret); err != nil { if err := evaluator.EvaluateModule(ctx, source, &ret); err != nil {
return nil, err return nil, err
} }

View File

@ -0,0 +1,14 @@
// Code generated from Pkl module `common.types.DWN`. DO NOT EDIT.
package dwn
// Environment holds runtime settings for the DWN module: a development
// flag, a cache version string, and filesystem paths to the HTTP-server
// and WASM assets.
type Environment struct {
	IsDevelopment  bool   `pkl:"isDevelopment" json:"isDevelopment,omitempty"`
	CacheVersion   string `pkl:"cacheVersion" json:"cacheVersion,omitempty"`
	HttpserverPath string `pkl:"httpserverPath" json:"httpserverPath,omitempty"`
	WasmExecPath   string `pkl:"wasmExecPath" json:"wasmExecPath,omitempty"`
	WasmPath       string `pkl:"wasmPath" json:"wasmPath,omitempty"`
}

View File

@ -1,5 +1,5 @@
// Code generated from Pkl module `dwn`. DO NOT EDIT. // Code generated from Pkl module `common.types.DWN`. DO NOT EDIT.
package config package dwn
type Schema struct { type Schema struct {
Version int `pkl:"version"` Version int `pkl:"version"`

View File

@ -0,0 +1,211 @@
//go:build js && wasm
// +build js,wasm
package bridge
import (
"bytes"
"fmt"
"io"
"net/http"
"net/http/httptest"
"strings"
"sync"
"syscall/js"
)
var (
	// Global buffer pool to reduce allocations when copying response
	// bodies; buffers are reset before being returned to the pool.
	bufferPool = sync.Pool{
		New: func() interface{} {
			return new(bytes.Buffer)
		},
	}
	// Cached JS globals, looked up once to avoid repeated property
	// lookups across the JS/Go boundary on every request.
	jsGlobal     = js.Global()
	jsUint8Array = jsGlobal.Get("Uint8Array")
	jsResponse   = jsGlobal.Get("Response")
	jsPromise    = jsGlobal.Get("Promise")
	jsWasmHTTP   = jsGlobal.Get("wasmhttp")
)
// ServeFetch installs handler as the JS-side fetch handler (via the
// `wasmhttp` global) and returns a release function that frees the JS
// callback. A nil handler falls back to http.DefaultServeMux.
func ServeFetch(handler http.Handler) func() {
	h := handler
	if h == nil {
		h = http.DefaultServeMux
	}
	// If wasmhttp.path configures a non-root mount point, route through a
	// mux that strips the prefix before invoking the handler.
	prefix := strings.TrimRight(jsWasmHTTP.Get("path").String(), "/")
	if prefix != "" {
		mux := http.NewServeMux()
		mux.Handle(prefix+"/", http.StripPrefix(prefix, h))
		h = mux
	}
	// Each fetch event returns a Promise; the request is processed on a
	// separate goroutine so the JS event loop is never blocked.
	// NOTE(review): args[1] is taken to be the JS Request object — confirm
	// against the wasmhttp setHandler contract.
	cb := js.FuncOf(func(_ js.Value, args []js.Value) interface{} {
		promise, resolve, reject := newPromiseOptimized()
		go handleRequest(h, args[1], resolve, reject)
		return promise
	})
	jsWasmHTTP.Call("setHandler", cb)
	return cb.Release
}
// handleRequest serves one JS Request through h and resolves the promise
// with a JS Response. Any panic in the pipeline (including buildRequest,
// which panics on body-read failure) is recovered and converted into a
// promise rejection instead of crashing the WASM instance.
func handleRequest(h http.Handler, jsReq js.Value, resolve, reject func(interface{})) {
	defer func() {
		if r := recover(); r != nil {
			var errMsg string
			// Use %+v for errors to include any extended detail they carry.
			if err, ok := r.(error); ok {
				errMsg = fmt.Sprintf("wasmhttp: panic: %+v", err)
			} else {
				errMsg = fmt.Sprintf("wasmhttp: panic: %v", r)
			}
			reject(errMsg)
		}
	}()
	recorder := newResponseRecorder()
	h.ServeHTTP(recorder, buildRequest(jsReq))
	resolve(recorder.jsResponse())
}
// buildRequest converts a JS Request into an *http.Request. It blocks on
// the request body's arrayBuffer() promise and panics on failure; the
// caller (handleRequest) recovers the panic and rejects the fetch promise.
func buildRequest(jsReq js.Value) *http.Request {
	// Get request body
	arrayBuffer, err := awaitPromiseOptimized(jsReq.Call("arrayBuffer"))
	if err != nil {
		panic(err)
	}
	// Copy the body bytes out of the JS heap in a single CopyBytesToGo.
	jsBody := jsUint8Array.New(arrayBuffer)
	bodyLen := jsBody.Get("length").Int()
	body := make([]byte, bodyLen)
	js.CopyBytesToGo(body, jsBody)
	// Create request
	req := httptest.NewRequest(
		jsReq.Get("method").String(),
		jsReq.Get("url").String(),
		bytes.NewReader(body),
	)
	// Drain the Headers entries() iterator. NOTE(review): Header.Set keeps
	// only the last value per name; the JS Headers API normally coalesces
	// duplicates into one entry, but confirm multi-valued headers are not
	// required here.
	headers := jsReq.Get("headers")
	headersIt := headers.Call("entries")
	for {
		entry := headersIt.Call("next")
		if entry.Get("done").Bool() {
			break
		}
		pair := entry.Get("value")
		req.Header.Set(pair.Index(0).String(), pair.Index(1).String())
	}
	return req
}
// ResponseRecorder wraps httptest.ResponseRecorder with a pooled scratch
// buffer used when converting the recorded response body to a JS value.
type ResponseRecorder struct {
	*httptest.ResponseRecorder
	buffer *bytes.Buffer
}

// newResponseRecorder returns a recorder whose scratch buffer is taken
// from the shared pool; jsResponse returns the buffer to the pool.
func newResponseRecorder() *ResponseRecorder {
	return &ResponseRecorder{
		ResponseRecorder: httptest.NewRecorder(),
		buffer:           bufferPool.Get().(*bytes.Buffer),
	}
}
// jsResponse converts the recorded HTTP response into a JS Response
// value, copying the body into a Uint8Array and mapping the status code
// and headers into the Response init object.
func (rr *ResponseRecorder) jsResponse() js.Value {
	// Reset and return the scratch buffer to the pool once done.
	defer func() {
		rr.buffer.Reset()
		bufferPool.Put(rr.buffer)
	}()
	res := rr.Result()
	defer res.Body.Close()
	// Prepare response body; js.Undefined() when there is no content.
	body := js.Undefined()
	if res.ContentLength != 0 {
		if _, err := io.Copy(rr.buffer, res.Body); err != nil {
			panic(err)
		}
		bodyBytes := rr.buffer.Bytes()
		body = jsUint8Array.New(len(bodyBytes))
		js.CopyBytesToJS(body, bodyBytes)
	}
	// Prepare response init object
	init := make(map[string]interface{}, 3)
	if res.StatusCode != 0 {
		init["status"] = res.StatusCode
	}
	if len(res.Header) > 0 {
		// Only the first value of each header is forwarded to JS.
		headers := make(map[string]interface{}, len(res.Header))
		for k, v := range res.Header {
			if len(v) > 0 {
				headers[k] = v[0]
			}
		}
		init["headers"] = headers
	}
	return jsResponse.New(body, init)
}
// newPromiseOptimized constructs a JS Promise and returns it together
// with its resolve and reject callbacks. The executor passed to the
// Promise constructor runs synchronously inside New, so resolve and
// reject are assigned before this function returns and the js.Func can
// be released immediately via the defer.
func newPromiseOptimized() (js.Value, func(interface{}), func(interface{})) {
	var (
		resolve     func(interface{})
		reject      func(interface{})
		promiseFunc = js.FuncOf(func(_ js.Value, args []js.Value) interface{} {
			// args[0]/args[1] are the executor's resolve/reject functions.
			resolve = func(v interface{}) { args[0].Invoke(v) }
			reject = func(v interface{}) { args[1].Invoke(v) }
			return js.Undefined()
		})
	)
	defer promiseFunc.Release()
	return jsPromise.New(promiseFunc), resolve, reject
}
// awaitPromiseOptimized blocks the calling goroutine until the given JS
// promise settles, returning its resolution value, or the rejection
// reason wrapped as a js.Error.
//
// The then/catch callbacks are invoked by the JS runtime on separate
// goroutines, so blocking on the channel here does not deadlock. Exactly
// one of the two callbacks fires for a given promise, so close(done) runs
// once. Both funcs are released only after the wait completes.
func awaitPromiseOptimized(promise js.Value) (js.Value, error) {
	done := make(chan struct{})
	var (
		result js.Value
		err error
	)
	thenFunc := js.FuncOf(func(_ js.Value, args []js.Value) interface{} {
		result = args[0]
		close(done)
		return nil
	})
	defer thenFunc.Release()
	catchFunc := js.FuncOf(func(_ js.Value, args []js.Value) interface{} {
		err = js.Error{Value: args[0]}
		close(done)
		return nil
	})
	defer catchFunc.Release()
	promise.Call("then", thenFunc).Call("catch", catchFunc)
	<-done
	return result, err
}

View File

@ -0,0 +1,34 @@
//go:build js && wasm
// +build js,wasm
package bridge
import (
"encoding/base64"
"encoding/json"
"github.com/labstack/echo/v4"
)
// WasmContextMiddleware lifts a base64-encoded WASM context from the
// X-Wasm-Context request header into the echo context under the key
// "wasm_context". Malformed or absent headers are silently ignored and
// the request proceeds unchanged.
func WasmContextMiddleware(next echo.HandlerFunc) echo.HandlerFunc {
	return func(c echo.Context) error {
		header := c.Request().Header.Get("X-Wasm-Context")
		if header != "" {
			decoded, err := DecodeWasmContext(header)
			if err == nil {
				c.Set("wasm_context", decoded)
			}
		}
		return next(c)
	}
}
// decodeWasmContext decodes the WASM context from a base64 encoded string
func DecodeWasmContext(ctx string) (map[string]any, error) {
decoded, err := base64.StdEncoding.DecodeString(ctx)
if err != nil {
return nil, err
}
var ctxData map[string]any
err = json.Unmarshal(decoded, &ctxData)
return ctxData, err
}

30
pkg/core/dwn/dwn.go Normal file
View File

@ -0,0 +1,30 @@
package dwn
import (
"encoding/json"
"os"
)
// dwnJSONFileName is the well-known config file name, read from the
// process's current working directory.
const dwnJSONFileName = "dwn.json"

// LoadJSONConfig reads dwn.json from the current working directory and
// decodes it into a Config.
func LoadJSONConfig() (*Config, error) {
	raw, err := os.ReadFile(dwnJSONFileName)
	if err != nil {
		return nil, err
	}
	cfg := new(Config)
	if err := json.Unmarshal(raw, cfg); err != nil {
		return nil, err
	}
	return cfg, nil
}
// MarshalJSON implements json.Marshaler for Config.
//
// A locally-defined alias type strips Config's method set before
// delegating to the standard encoder: calling json.Marshal(c) directly
// would re-invoke this method and recurse until the stack overflows.
func (c *Config) MarshalJSON() ([]byte, error) {
	type plain Config
	return json.Marshal((*plain)(c))
}
// UnmarshalJSON implements json.Unmarshaler for Config.
//
// As in MarshalJSON, the alias type avoids infinite recursion:
// json.Unmarshal(data, c) would detect the Unmarshaler implementation and
// call this method again.
func (c *Config) UnmarshalJSON(data []byte) error {
	type plain Config
	return json.Unmarshal(data, (*plain)(c))
}

View File

@ -3,8 +3,6 @@ package handlers
import ( import (
"github.com/go-webauthn/webauthn/protocol" "github.com/go-webauthn/webauthn/protocol"
"github.com/labstack/echo/v4" "github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/motr/types/orm"
) )
// ╭───────────────────────────────────────────────────────────╮ // ╭───────────────────────────────────────────────────────────╮
@ -47,15 +45,11 @@ func RegisterSubjectCheck(e echo.Context) error {
// RegisterSubjectStart handles the register subject start. // RegisterSubjectStart handles the register subject start.
func RegisterSubjectStart(e echo.Context) error { func RegisterSubjectStart(e echo.Context) error {
// Get subject and address // Get subject and address
subject := e.FormValue("subject") // subject := e.FormValue("subject")
address := e.FormValue("address")
// Get challenge // Get challenge
chal, err := protocol.CreateChallenge()
if err != nil { return nil
return err
}
return e.JSON(201, orm.NewCredentialCreationOptions(subject, address, chal))
} }
// RegisterSubjectFinish handles the register subject finish. // RegisterSubjectFinish handles the register subject finish.

View File

@ -0,0 +1,284 @@
package handlers
import (
"fmt"
"net/http"
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/core/dwn"
)
// generateRawServiceWorkerJS renders the service-worker script served to
// browsers. The generated script: registers a WASM-backed HTTP listener,
// pre-caches the WASM assets on install, cleans stale caches on activate,
// routes /api/ fetches through the WASM handler with an offline request
// queue replayed via Background Sync, and relays WASM responses over a
// MessageChannel port.
//
// The script body is a single raw-string literal — any edit to it changes
// the JavaScript shipped to every client. The %s/%q verbs are filled from
// cfg; see the comments on the trailing argument list for the mapping.
func generateRawServiceWorkerJS(cfg *dwn.Environment) string {
	return fmt.Sprintf(`const CACHE_NAMES = {
wasm: "wasm-cache-%s",
static: "static-cache-%s",
dynamic: "dynamic-cache-%s"
};
importScripts(
%q,
%q
);
// Initialize WASM HTTP listener with configured path
const wasmInstance = registerWasmHTTPListener(%q);
// MessageChannel port for WASM communication
let wasmPort;
// Request queue for offline operations
let requestQueue = new Map();
// Setup message channel handler
self.addEventListener('message', async (event) => {
if (event.data.type === 'PORT_INITIALIZATION') {
wasmPort = event.data.port;
setupWasmCommunication();
}
});
function setupWasmCommunication() {
wasmPort.onmessage = async (event) => {
const { type, data } = event.data;
switch (type) {
case 'WASM_REQUEST':
handleWasmRequest(data);
break;
case 'SYNC_REQUEST':
processSyncQueue();
break;
}
};
// Notify that WASM is ready
wasmPort.postMessage({ type: 'WASM_READY' });
}
// Enhanced install event
self.addEventListener("install", (event) => {
event.waitUntil(
Promise.all([
skipWaiting(),
// Cache WASM binary and essential resources
caches.open(CACHE_NAMES.wasm).then(cache =>
cache.addAll([
%q,
%q
])
)
])
);
});
// Enhanced activate event
self.addEventListener("activate", (event) => {
event.waitUntil(
Promise.all([
clients.claim(),
// Clean up old caches
caches.keys().then(keys =>
Promise.all(
keys.map(key => {
if (!Object.values(CACHE_NAMES).includes(key)) {
return caches.delete(key);
}
})
)
)
])
);
});
// Intercept fetch events
self.addEventListener('fetch', (event) => {
const request = event.request;
// Handle API requests differently from static resources
if (request.url.includes('/api/')) {
event.respondWith(handleApiRequest(request));
} else {
event.respondWith(handleStaticRequest(request));
}
});
async function handleApiRequest(request) {
try {
// Try to make the request
const response = await fetch(request.clone());
// If successful, pass through WASM handler
if (response.ok) {
return await processWasmResponse(request, response);
}
// If offline or failed, queue the request
await queueRequest(request);
// Return cached response if available
const cachedResponse = await caches.match(request);
if (cachedResponse) {
return cachedResponse;
}
// Return offline response
return new Response(
JSON.stringify({ error: 'Currently offline' }),
{
status: 503,
headers: { 'Content-Type': 'application/json' }
}
);
} catch (error) {
await queueRequest(request);
return new Response(
JSON.stringify({ error: 'Request failed' }),
{
status: 500,
headers: { 'Content-Type': 'application/json' }
}
);
}
}
async function handleStaticRequest(request) {
// Check cache first
const cachedResponse = await caches.match(request);
if (cachedResponse) {
return cachedResponse;
}
try {
const response = await fetch(request);
// Cache successful responses
if (response.ok) {
const cache = await caches.open(CACHE_NAMES.static);
cache.put(request, response.clone());
}
return response;
} catch (error) {
// Return offline page for navigation requests
if (request.mode === 'navigate') {
return caches.match('/offline.html');
}
throw error;
}
}
async function processWasmResponse(request, response) {
const responseClone = response.clone();
try {
const processedResponse = await wasmInstance.processResponse(responseClone);
if (wasmPort) {
wasmPort.postMessage({
type: 'RESPONSE',
requestId: request.headers.get('X-Wasm-Request-ID'),
response: processedResponse
});
}
return processedResponse;
} catch (error) {
console.error('WASM processing error:', error);
return response;
}
}
async function queueRequest(request) {
const serializedRequest = await serializeRequest(request);
requestQueue.set(request.url, serializedRequest);
try {
await self.registration.sync.register('wasm-sync');
} catch (error) {
console.error('Sync registration failed:', error);
}
}
async function serializeRequest(request) {
const headers = {};
for (const [key, value] of request.headers.entries()) {
headers[key] = value;
}
return {
url: request.url,
method: request.method,
headers,
body: await request.text(),
timestamp: Date.now()
};
}
// Handle background sync
self.addEventListener('sync', (event) => {
if (event.tag === 'wasm-sync') {
event.waitUntil(processSyncQueue());
}
});
async function processSyncQueue() {
const requests = Array.from(requestQueue.values());
for (const serializedRequest of requests) {
try {
const response = await fetch(new Request(serializedRequest.url, {
method: serializedRequest.method,
headers: serializedRequest.headers,
body: serializedRequest.body
}));
if (response.ok) {
requestQueue.delete(serializedRequest.url);
if (wasmPort) {
wasmPort.postMessage({
type: 'SYNC_COMPLETE',
url: serializedRequest.url
});
}
}
} catch (error) {
console.error('Sync failed for request:', error);
}
}
}
// Handle payment requests
self.addEventListener("canmakepayment", function (e) {
e.respondWith(Promise.resolve(true));
});
// Handle periodic sync if available
self.addEventListener('periodicsync', (event) => {
if (event.tag === 'wasm-sync') {
event.waitUntil(processSyncQueue());
}
});`,
		cfg.CacheVersion,   // %s: wasm cache name suffix
		cfg.CacheVersion,   // %s: static cache name suffix
		cfg.CacheVersion,   // %s: dynamic cache name suffix
		cfg.WasmExecPath,   // %q: importScripts — Go wasm_exec shim
		cfg.HttpserverPath, // %q: importScripts — WASM HTTP listener shim
		cfg.WasmPath,       // %q: registerWasmHTTPListener argument
		cfg.WasmPath,       // %q: install-time cache.addAll entry
		cfg.WasmExecPath,   // %q: install-time cache.addAll entry
	)
}
// ServiceWorkerHandler returns an Echo handler that serves the generated
// service-worker script. Service-Worker-Allowed is widened to "/" so the
// worker may control the whole origin even when served from a sub-path.
func ServiceWorkerHandler(cfg *dwn.Environment) echo.HandlerFunc {
	return func(c echo.Context) error {
		headers := c.Response().Header()
		headers.Set("Content-Type", "application/javascript")
		headers.Set("Service-Worker-Allowed", "/")
		script := generateRawServiceWorkerJS(cfg)
		return c.String(http.StatusOK, script)
	}
}

11
pkg/core/dwn/init.pkl.go Normal file
View File

@ -0,0 +1,11 @@
// Code generated from Pkl module `common.types.DWN`. DO NOT EDIT.
package dwn
import "github.com/apple/pkl-go/pkl"
func init() {
pkl.RegisterMapping("common.types.DWN", DWN{})
pkl.RegisterMapping("common.types.DWN#Config", Config{})
pkl.RegisterMapping("common.types.DWN#Schema", Schema{})
pkl.RegisterMapping("common.types.DWN#Environment", Environment{})
}

View File

@ -0,0 +1,60 @@
//go:build js && wasm
// +build js,wasm
package server
import (
"github.com/labstack/echo/v4"
"github.com/onsonr/sonr/pkg/common/middleware/session"
"github.com/onsonr/sonr/pkg/core/dwn"
"github.com/onsonr/sonr/pkg/core/dwn/bridge"
"github.com/onsonr/sonr/pkg/core/dwn/handlers"
)
// Server is the interface that wraps the Serve function.
type Server interface {
	Serve() func()
}

// MotrServer hosts the in-WASM Echo instance that answers requests
// forwarded from the browser service worker.
//
// NOTE(review): the exported path/cache fields appear intended to carry
// the dwn.Environment asset settings, but the constructor in this
// revision does not populate them — verify before relying on them.
type MotrServer struct {
	e *echo.Echo
	WasmPath string
	WasmExecPath string
	HTTPServerPath string
	CacheVersion string
	IsDev bool
}
// New constructs the Motr WASM server: it copies the asset paths and
// cache tag from env onto the server, wires the session and WASM-context
// middleware, and registers the DWN API routes.
func New(env *dwn.Environment, config *dwn.Config) Server {
	s := &MotrServer{e: echo.New()}
	// Populate asset/cache settings from the environment; previously env
	// was accepted but never used, leaving the exported fields zero.
	if env != nil {
		s.WasmPath = env.WasmPath
		s.WasmExecPath = env.WasmExecPath
		s.HTTPServerPath = env.HttpserverPath
		s.CacheVersion = env.CacheVersion
	}
	s.e.Use(session.MotrMiddleware(config))
	s.e.Use(bridge.WasmContextMiddleware)
	// Add WASM-specific routes
	registerAPI(s.e)
	return s
}
// Serve hands the Echo instance to the JS fetch bridge and returns the
// function produced by bridge.ServeFetch (presumably a shutdown/cleanup
// hook — confirm in the bridge package).
func (s *MotrServer) Serve() func() {
	return bridge.ServeFetch(s.e)
}
// registerAPI mounts the Decentralized Web Node API routes under the
// "api" group.
func registerAPI(e *echo.Echo) {
	api := e.Group("api")
	// Subject registration flow.
	api.GET("/register/:subject/start", handlers.RegisterSubjectStart)
	api.POST("/register/:subject/check", handlers.RegisterSubjectCheck)
	api.POST("/register/:subject/finish", handlers.RegisterSubjectFinish)
	// Subject login flow.
	api.GET("/login/:subject/start", handlers.LoginSubjectStart)
	api.POST("/login/:subject/check", handlers.LoginSubjectCheck)
	api.POST("/login/:subject/finish", handlers.LoginSubjectFinish)
	// Per-origin grant endpoints.
	api.GET("/:origin/grant/jwks", handlers.GetJWKS)
	api.GET("/:origin/grant/token", handlers.GetToken)
	api.POST("/:origin/grant/:subject", handlers.GrantAuthorization)
}

View File

@ -0,0 +1,171 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package accumulator implements the cryptographic accumulator as described in https://eprint.iacr.org/2020/777.pdf
// It also implements the zero knowledge proof of knowledge protocol
// described in section 7 of the paper.
// Note: the paper only describes for non-membership witness case, but we don't
// use non-membership witness. We only implement the membership witness case.
package accumulator
import (
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// structMarshal is the bare-encoded wire form shared by the accumulator
// and key types: a curve name plus the curve-specific byte encoding of
// the value.
type structMarshal struct {
	Curve string `bare:"curve"`
	Value []byte `bare:"value"`
}

// Element is an accumulated member, represented as a scalar (members are
// hashed to scalars before accumulation).
type Element curves.Scalar

// Coefficient is a batch-update polynomial coefficient, represented as a
// curve point (see Accumulator.Update).
type Coefficient curves.Point

// Accumulator holds the accumulator value V: a point in G1.
type Accumulator struct {
	value curves.Point
}
// New initializes the accumulator to the G1 generator and returns it.
//
// Supporting non-membership witnesses would require the accumulator
// initialization of section 6 of https://eprint.iacr.org/2020/777.pdf,
// i.e. V0 = prod(y + α) * P for y ∈ Y_V0 with P a G1 generator. Since
// only membership witnesses are used here, the plain generator suffices.
func (acc *Accumulator) New(curve *curves.PairingCurve) (*Accumulator, error) {
	acc.value = curve.Scalar.Point().Generator()
	return acc, nil
}
// WithElements resets the accumulator and pre-fills it with the given
// (already hashed) members: V = prod(y + α) * V0 for every y in m.
func (acc *Accumulator) WithElements(curve *curves.PairingCurve, key *SecretKey, m []Element) (*Accumulator, error) {
	if _, err := acc.New(curve); err != nil {
		return nil, err
	}
	product, err := key.BatchAdditions(m)
	if err != nil {
		return nil, err
	}
	acc.value = acc.value.Mul(product)
	return acc, nil
}
// AddElements folds a batch of members into the accumulator with a
// single point multiplication: V' = prod(y + α) * V.
func (acc *Accumulator) AddElements(key *SecretKey, m []Element) (*Accumulator, error) {
	if acc.value == nil || key.value == nil {
		return nil, fmt.Errorf("accumulator and secret key should not be nil")
	}
	product, err := key.BatchAdditions(m)
	if err != nil {
		return nil, err
	}
	acc.value = acc.value.Mul(product)
	return acc, nil
}
// Add accumulates a single member: V' = (y + α) * V.
func (acc *Accumulator) Add(key *SecretKey, e Element) (*Accumulator, error) {
	if acc.value == nil || acc.value.IsIdentity() || key.value == nil || e == nil {
		return nil, fmt.Errorf("accumulator, secret key and element should not be nil")
	}
	factor := e.Add(key.value) // y + α
	acc.value = acc.value.Mul(factor)
	return acc, nil
}
// Remove deletes a single member if present: V' = 1/(y + α) * V.
func (acc *Accumulator) Remove(key *SecretKey, e Element) (*Accumulator, error) {
	if acc.value == nil || acc.value.IsIdentity() || key.value == nil || e == nil {
		return nil, fmt.Errorf("accumulator, secret key and element should not be nil")
	}
	factor := e.Add(key.value) // y + α
	inverse, err := factor.Invert() // 1/(y + α)
	if err != nil {
		return nil, err
	}
	acc.value = acc.value.Mul(inverse)
	return acc, nil
}
// Update performs a batch addition and deletion as described on page 7,
// section 3 of https://eprint.iacr.org/2020/777.pdf. It returns the
// updated accumulator together with the batch-polynomial coefficient
// points that witness holders need to update membership witnesses
// without knowing the secret key.
func (acc *Accumulator) Update(key *SecretKey, additions []Element, deletions []Element) (*Accumulator, []Coefficient, error) {
	if acc.value == nil || acc.value.IsIdentity() || key.value == nil {
		return nil, nil, fmt.Errorf("accumulator and secret key should not be nil")
	}
	// Compute dA(-alpha) = prod(y + alpha), y in the set of A ⊆ ACC-Y_V
	a, err := key.BatchAdditions(additions)
	if err != nil {
		return nil, nil, err
	}
	// Compute dD(-alpha) = 1/prod(y + alpha), y in the set of D ⊆ Y_V
	d, err := key.BatchDeletions(deletions)
	if err != nil {
		return nil, nil, err
	}
	// dA(-alpha)/dD(-alpha)
	div := a.Mul(d)
	newAcc := acc.value.Mul(div)
	// Build the coefficient points from the batch polynomial. Note they
	// are multiplied against the PRE-update accumulator value on purpose:
	// witness updates are defined relative to the old V.
	elements, err := key.CreateCoefficients(additions, deletions)
	if err != nil {
		return nil, nil, err
	}
	coefficients := make([]Coefficient, len(elements))
	for i := 0; i < len(elements); i++ {
		coefficients[i] = acc.value.Mul(elements[i])
	}
	// Only now swap in the updated value.
	acc.value = newAcc
	return acc, coefficients, nil
}
// MarshalBinary serializes the accumulator as a bare-encoded pair of
// (curve name, compressed affine point).
func (acc Accumulator) MarshalBinary() ([]byte, error) {
	if acc.value == nil {
		return nil, fmt.Errorf("accumulator cannot be nil")
	}
	return bare.Marshal(&structMarshal{
		Curve: acc.value.CurveName(),
		Value: acc.value.ToAffineCompressed(),
	})
}
// UnmarshalBinary restores the accumulator from MarshalBinary output.
func (acc *Accumulator) UnmarshalBinary(data []byte) error {
	wire := new(structMarshal)
	if err := bare.Unmarshal(data, wire); err != nil {
		return err
	}
	curve := curves.GetCurveByName(wire.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	point, err := curve.NewIdentityPoint().FromAffineCompressed(wire.Value)
	if err != nil {
		return err
	}
	acc.value = point
	return nil
}

View File

@ -0,0 +1,188 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"encoding/hex"
"fmt"
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// TestNewAccumulator100 checks that a fresh accumulator equals the G1
// generator and that its serialized form is 60 bytes (120 hex chars).
// NOTE(review): the fmt.Println calls are leftover debug output; they are
// kept here only because removing them would orphan the fmt import —
// consider switching them to t.Log and dropping the import in one change.
func TestNewAccumulator100(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	accBz, err := acc.MarshalBinary()
	require.NoError(t, err)
	fmt.Println(accBz)
	fmt.Println(len(accBz))
	fmt.Println(hex.EncodeToString(accBz))
	fmt.Println(len(hex.EncodeToString(accBz)))
	require.Equal(t, 60, len(accBz), "Marshalled accumulator should be 60 bytes")
	require.Equal(t, 120, len(hex.EncodeToString(accBz)), "Hex-encoded accumulator should be 120 characters")
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestNewAccumulator10K checks that a fresh accumulator equals the G1
// generator.
// NOTE(review): despite the name, no 10K-element set is constructed —
// confirm whether a bulk-initialization body was lost in a refactor.
func TestNewAccumulator10K(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestNewAccumulator10M checks that a fresh accumulator equals the G1
// generator; skipped under -short.
// NOTE(review): like TestNewAccumulator10K, the body no longer builds the
// large element set its name and the short-mode skip imply — verify.
func TestNewAccumulator10M(t *testing.T) {
	// Initiating 10M values takes time
	if testing.Short() {
		t.Skip("skipping test in short mode.")
	}
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestWithElements verifies that pre-filling an accumulator with two
// members moves it off both the identity and the generator, and that
// removing both members returns it to the generator.
func TestWithElements(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, _ := new(SecretKey).New(curve, seed[:])
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	elements := []Element{element1, element2}
	newAcc, err := new(Accumulator).WithElements(curve, key, elements)
	require.NoError(t, err)
	require.NotNil(t, newAcc)
	require.NotEqual(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Identity().ToAffineCompressed())
	require.NotEqual(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	// Removal errors deliberately ignored; the final Equal assertion
	// fails anyway if either removal did not take effect.
	_, _ = newAcc.Remove(key, element1)
	_, _ = newAcc.Remove(key, element2)
	require.Equal(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAdd verifies that accumulating one element moves the accumulator
// off the generator.
//
// Fixed from the previous revision: the error returned by acc.New and
// acc.Add was discarded (`_, _ =`) while a stale `err` from key creation
// was re-asserted, so failures in New/Add went unnoticed.
func TestAdd(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	element := curve.Scalar.Hash([]byte("value1"))
	require.NotNil(t, element)
	acc, err = acc.Add(key, element)
	require.NoError(t, err)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestRemove verifies that Add followed by Remove restores the
// accumulator to the generator.
//
// Fixed from the previous revision: the Add error was discarded and a
// stale `err` was re-asserted after hashing the element.
func TestRemove(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	element := curve.Scalar.Hash([]byte("value1"))
	require.NotNil(t, element)
	// add element: accumulator must leave the generator
	acc, err = acc.Add(key, element)
	require.NoError(t, err)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	// remove element: accumulator must return to the generator
	acc, err = acc.Remove(key, element)
	require.NoError(t, err)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAddElements verifies that batch accumulation of three elements
// moves the accumulator off the generator.
//
// Fixed from the previous revision: the accumulator was constructed with
// a literal and then re-initialized via `_, _ = acc.New(curve)` with the
// error dropped, while a stale `err` was re-asserted.
func TestAddElements(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	element3 := curve.Scalar.Hash([]byte("value3"))
	elements := []Element{element1, element2, element3}
	acc, err = acc.AddElements(key, elements)
	require.NoError(t, err)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAccumulatorMarshal round-trips an accumulator through
// MarshalBinary/UnmarshalBinary and checks that an empty accumulator
// refuses to marshal.
//
// Fixed from the previous revision: the UnmarshalBinary error was
// silently discarded, so a decode failure would surface only as a
// confusing Equal failure (or a nil-pointer panic).
func TestAccumulatorMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	point := curve.PointG1.Generator().Mul(curve.Scalar.New(2))
	data, err := Accumulator{point}.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, data)
	// element cannot be empty
	_, err = Accumulator{}.MarshalBinary()
	require.Error(t, err)
	e := &Accumulator{curve.PointG1.Generator()}
	err = e.UnmarshalBinary(data)
	require.NoError(t, err)
	require.True(t, e.value.Equal(point))
}
// TestUpdate verifies that a batch Update with three additions moves the
// accumulator off the generator, and that a second Update deleting the
// same three elements restores the generator.
func TestUpdate(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	element3 := curve.Scalar.Hash([]byte("value3"))
	elements := []Element{element1, element2, element3}
	// batch addition
	acc, _, err = acc.Update(key, elements, nil)
	require.NoError(t, err)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	// batch deletion of the same set undoes the addition
	acc, _, err = acc.Update(key, nil, elements)
	require.NoError(t, err)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}

244
pkg/crypto/accumulator/key.go Executable file
View File

@ -0,0 +1,244 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// SecretKey holds the secret scalar α known only to the accumulator
// manager.
type SecretKey struct {
	value curves.Scalar
}

// New derives the secret key by hashing the seed to a scalar. It never
// fails; the error return exists for interface symmetry.
// NOTE(review): an empty seed is accepted and yields a deterministic key
// — confirm callers always supply high-entropy seeds.
func (sk *SecretKey) New(curve *curves.PairingCurve, seed []byte) (*SecretKey, error) {
	sk.value = curve.Scalar.Hash(seed)
	return sk, nil
}
// GetPublicKey derives the public key α·Q, where Q is the generator of
// the pairing group opposite the accumulator's (G2 for a G1 accumulator).
func (sk SecretKey) GetPublicKey(curve *curves.PairingCurve) (*PublicKey, error) {
	if sk.value == nil || curve == nil {
		return nil, fmt.Errorf("curve and sk value cannot be nil")
	}
	otherGen := curve.Scalar.Point().(curves.PairingPoint).OtherGroup().Generator()
	pub := otherGen.Mul(sk.value)
	return &PublicKey{pub.(curves.PairingPoint)}, nil
}
// MarshalBinary serializes the secret key as a bare-encoded pair of
// (curve name, scalar bytes).
func (sk SecretKey) MarshalBinary() ([]byte, error) {
	if sk.value == nil {
		return nil, fmt.Errorf("sk cannot be empty")
	}
	return bare.Marshal(&structMarshal{
		Curve: sk.value.Point().CurveName(),
		Value: sk.value.Bytes(),
	})
}
// UnmarshalBinary restores the secret key from MarshalBinary output.
func (sk *SecretKey) UnmarshalBinary(data []byte) error {
	wire := new(structMarshal)
	if err := bare.Unmarshal(data, wire); err != nil {
		return err
	}
	curve := curves.GetCurveByName(wire.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	scalar, err := curve.NewScalar().SetBytes(wire.Value)
	if err != nil {
		return err
	}
	sk.value = scalar
	return nil
}
// BatchAdditions returns prod(y + α) over every y in additions.
func (sk SecretKey) BatchAdditions(additions []Element) (Element, error) {
	if sk.value == nil {
		return nil, fmt.Errorf("secret key cannot be empty")
	}
	product := sk.value.One()
	for i := range additions {
		if additions[i] == nil {
			return nil, fmt.Errorf("some element in additions is nil")
		}
		// product *= (y + α)
		product = product.Mul(additions[i].Add(sk.value))
	}
	return product, nil
}
// BatchDeletions returns 1/prod(y + α) over every y in deletions.
func (sk SecretKey) BatchDeletions(deletions []Element) (Element, error) {
	product, err := sk.BatchAdditions(deletions)
	if err != nil {
		return nil, err
	}
	inverse, err := product.Invert()
	if err != nil {
		return nil, err
	}
	return inverse, nil
}
// CreateCoefficients creates the batch polynomial coefficients used for
// witness updates after a batched Update.
// See page 7 of https://eprint.iacr.org/2020/777.pdf. The returned slice
// holds the coefficients of vA(x) - vD(x)·prod(yA + α) as scalars, in
// ascending degree order.
func (sk SecretKey) CreateCoefficients(additions []Element, deletions []Element) ([]Element, error) {
	if sk.value == nil {
		return nil, fmt.Errorf("secret key should not be nil")
	}
	// vD(x) = ∑^{m}_{s=1}{ ∏ 1..s {yD_i + alpha}^-1 ∏ 1 ..s-1 {yD_j - x}
	one := sk.value.One()
	m1 := one.Neg() // m1 is -1, used as the coefficient of -x below
	vD := make(polynomial, 0, len(deletions))
	for s := 0; s < len(deletions); s++ {
		// ∏ 1..s (yD_i + alpha)^-1
		c, err := sk.BatchDeletions(deletions[0 : s+1])
		if err != nil {
			return nil, fmt.Errorf("error in sk batchDeletions")
		}
		// poly starts as the constant polynomial 1
		poly := make(polynomial, 1, s+2)
		poly[0] = one
		// ∏ 1..(s-1) (yD_j - x)
		for j := 0; j < s; j++ {
			// linear factor t(x) = yD_j - x
			t := make(polynomial, 2)
			// yD_j
			t[0] = deletions[j]
			// -x
			t[1] = m1
			// polynomial multiplication (yD_1-x) * (yD_2 - x) ...
			poly, err = poly.Mul(t)
			if err != nil {
				return nil, err
			}
		}
		// scale the product polynomial by the batch-deletion scalar
		poly, err = poly.MulScalar(c)
		if err != nil {
			return nil, err
		}
		// accumulate into vD
		vD, err = vD.Add(poly)
		if err != nil {
			return nil, err
		}
	}
	// vD(x) * ∏ 1..n (yA_i + alpha)
	bAdd, err := sk.BatchAdditions(additions)
	if err != nil {
		return nil, fmt.Errorf("error in sk batchAdditions")
	}
	vD, err = vD.MulScalar(bAdd)
	if err != nil {
		return nil, err
	}
	// vA(x) = ∑^n_{s=1}{ ∏ 1..s-1 {yA_i + alpha} ∏ s+1..n {yA_j - x} }
	vA := make(polynomial, 0, len(additions))
	for s := 0; s < len(additions); s++ {
		// ∏ 1..s-1 {yA_i + alpha} — empty product is 1 for s == 0
		var c Element
		if s == 0 {
			c = one
		} else {
			c, err = sk.BatchAdditions(additions[0:s])
			if err != nil {
				return nil, err
			}
		}
		poly := make(polynomial, 1, s+2)
		poly[0] = one
		// ∏ s+1..n {yA_j - x}
		for j := s + 1; j < len(additions); j++ {
			t := make(polynomial, 2)
			t[0] = additions[j]
			t[1] = m1
			// polynomial multiplication (yA_1-x) * (yA_2 - x) ...
			poly, err = poly.Mul(t)
			if err != nil {
				return nil, err
			}
		}
		poly, err = poly.MulScalar(c)
		if err != nil {
			return nil, err
		}
		vA, err = vA.Add(poly)
		if err != nil {
			return nil, err
		}
	}
	// vA - vD
	vA, err = vA.Sub(vD)
	if err != nil {
		return nil, err
	}
	// convert from curves.Scalar coefficients to the exported Element type
	result := make([]Element, len(vA))
	for i := 0; i < len(vA); i++ {
		result[i] = vA[i]
	}
	return result, nil
}
// PublicKey is the accumulator public key: α · Q, where α is the secret
// key and Q generates the pairing group opposite the accumulator's (G2
// here; see SecretKey.GetPublicKey).
type PublicKey struct {
	value curves.PairingPoint
}
// MarshalBinary serializes the public key as a bare-encoded pair of
// (curve name, compressed affine point).
func (pk PublicKey) MarshalBinary() ([]byte, error) {
	if pk.value == nil {
		return nil, fmt.Errorf("public key cannot be nil")
	}
	return bare.Marshal(&structMarshal{
		Curve: pk.value.CurveName(),
		Value: pk.value.ToAffineCompressed(),
	})
}
// UnmarshalBinary restores the public key from MarshalBinary output.
func (pk *PublicKey) UnmarshalBinary(data []byte) error {
	wire := new(structMarshal)
	if err := bare.Unmarshal(data, wire); err != nil {
		return err
	}
	curve := curves.GetPairingCurveByName(wire.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	point, err := curve.NewScalar().Point().FromAffineCompressed(wire.Value)
	if err != nil {
		return err
	}
	pairingPoint, ok := point.(curves.PairingPoint)
	if !ok {
		return errors.New("can't convert to PairingPoint")
	}
	pk.value = pairingPoint
	return nil
}

View File

@ -0,0 +1,88 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// TestSecretKeyMarshal round-trips a secret key through
// MarshalBinary/UnmarshalBinary (overwriting a different existing value)
// and checks that an empty key refuses to marshal.
func TestSecretKeyMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	data, err := SecretKey{curve.Scalar.One()}.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, data)
	e := &SecretKey{curve.Scalar.New(2)}
	err = e.UnmarshalBinary(data)
	require.NoError(t, err)
	require.Equal(t, e.value.Bytes(), curve.Scalar.One().Bytes())
	// element cannot be empty
	_, err = SecretKey{}.MarshalBinary()
	require.Error(t, err)
}
// TestPublicKeyMarshal round-trips a public key through
// MarshalBinary/UnmarshalBinary.
//
// Fixed from the previous revision: the GetPublicKey error was discarded
// with `_`, which would turn a key-derivation failure into a nil-pointer
// panic at pk.MarshalBinary.
func TestPublicKeyMarshal(t *testing.T) {
	// Actually test both toBytes() and from()
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk := &SecretKey{curve.Scalar.New(3)}
	pk, err := sk.GetPublicKey(curve)
	require.NoError(t, err)
	pkBytes, err := pk.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, pkBytes)
	pk2 := &PublicKey{}
	err = pk2.UnmarshalBinary(pkBytes)
	require.NoError(t, err)
	require.True(t, pk.value.Equal(pk2.value))
}
// TestBatch verifies that BatchAdditions and BatchDeletions are scalar
// inverses of each other, both on the scalars themselves and when applied
// to the G1 generator as accumulator multiplications.
func TestBatch(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	sk, _ := new(SecretKey).New(curve, seed[:])
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	elements := []Element{element1, element2}
	add, err := sk.BatchAdditions(elements)
	require.NoError(t, err)
	require.NotNil(t, add)
	del, err := sk.BatchDeletions(elements)
	require.NoError(t, err)
	require.NotNil(t, del)
	// prod(y+α) * 1/prod(y+α) == 1
	result := add.Mul(del)
	require.Equal(t, result, curve.Scalar.One())
	g1 := curve.PointG1.Generator()
	acc := g1.Mul(add)
	require.NotEqual(t, acc, g1)
	acc = acc.Mul(del)
	require.Equal(t, acc.ToAffineCompressed(), g1.ToAffineCompressed())
	acc2 := g1.Mul(result)
	require.True(t, acc2.Equal(g1))
}
// TestCoefficient checks that CreateCoefficients over 2 additions and 3
// deletions yields a degree-2 polynomial (3 scalar coefficients).
//
// Fixed from the previous revision: the SecretKey.New error was silently
// discarded, and require.Equal's arguments were reversed (testify expects
// (t, expected, actual), which matters for the failure message).
func TestCoefficient(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, err := new(SecretKey).New(curve, []byte("1234567890"))
	require.NoError(t, err)
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	element3 := curve.Scalar.Hash([]byte("value3"))
	element4 := curve.Scalar.Hash([]byte("value4"))
	element5 := curve.Scalar.Hash([]byte("value5"))
	elements := []Element{element1, element2, element3, element4, element5}
	coefficients, err := sk.CreateCoefficients(elements[0:2], elements[2:5])
	require.NoError(t, err)
	require.Equal(t, 3, len(coefficients))
}

204
pkg/crypto/accumulator/lib.go Executable file
View File

@ -0,0 +1,204 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"fmt"
"math"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// dad evaluates the "d" polynomials from the paper at the point y:
// dA(y) = prod(y_A,t - y) and dD(y) = prod(y_D,t - y), t = 1..n,
// where values supplies the y_*,t members.
func dad(values []Element, y Element) (Element, error) {
	if values == nil || y == nil {
		return nil, fmt.Errorf("curve, values or y should not be nil")
	}
	for _, value := range values {
		if value == nil {
			return nil, fmt.Errorf("some element is nil")
		}
	}
	// Single factor: return it directly without the leading 1.
	if len(values) == 1 {
		return values[0].Sub(y), nil
	}
	product := y.One()
	for _, value := range values {
		product = product.Mul(value.Sub(y))
	}
	return product, nil
}
// polynomialPoint is a polynomial whose coefficients are curve points;
// index i holds the coefficient of x^i.
type polynomialPoint []curves.Point

// evaluate computes p(x) = p[0] + p[1]·x + p[2]·x² + ... by scalar
// multiplication of each coefficient point.
//
// Fixed from the previous revision: the guard was `p == nil`, which let a
// non-nil empty slice through and panicked on the p[0] access below; it
// now rejects any zero-length polynomial.
func (p polynomialPoint) evaluate(x curves.Scalar) (curves.Point, error) {
	if len(p) == 0 {
		return nil, fmt.Errorf("p cannot be empty")
	}
	for i := 0; i < len(p); i++ {
		if p[i] == nil {
			return nil, fmt.Errorf("some coefficient in p is nil")
		}
	}
	// pp tracks x^i as the loop walks the ascending-degree coefficients.
	pp := x
	res := p[0]
	for i := 1; i < len(p); i++ {
		r := p[i].Mul(pp)
		res = res.Add(r)
		pp = pp.Mul(x)
	}
	return res, nil
}
// Add returns the coefficient-wise sum of the two point polynomials.
// Coefficients are deep-copied (via adding the identity) so neither
// operand is aliased by the result.
func (p polynomialPoint) Add(rhs polynomialPoint) (polynomialPoint, error) {
	size := int(math.Max(float64(len(p)), float64(len(rhs))))
	sum := make(polynomialPoint, size)
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		sum[i] = coeff.Add(coeff.Identity())
	}
	for i, coeff := range rhs {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		if sum[i] == nil {
			sum[i] = coeff.Add(coeff.Identity())
		} else {
			sum[i] = sum[i].Add(coeff)
		}
	}
	return sum, nil
}
// Mul for PolynomialG1 computes rhs * p, p is a polynomial, rhs is a value
func (p polynomialPoint) Mul(rhs curves.Scalar) (polynomialPoint, error) {
	scaled := make(polynomialPoint, len(p))
	for i := range p {
		if p[i] == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		scaled[i] = p[i].Mul(rhs)
	}
	return scaled, nil
}
type polynomial []curves.Scalar
// Add adds two polynomials
func (p polynomial) Add(rhs polynomial) (polynomial, error) {
	maxLen := int(math.Max(float64(len(p)), float64(len(rhs))))
	sum := make([]curves.Scalar, maxLen)
	// Copy p's coefficients.
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		sum[i] = coeff.Clone()
	}
	// Fold rhs in; slots beyond len(p) are still nil and get cloned.
	for i, coeff := range rhs {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		if sum[i] == nil {
			sum[i] = coeff.Clone()
		} else {
			sum[i] = sum[i].Add(coeff)
		}
	}
	return sum, nil
}
// Sub computes p-rhs and returns
func (p polynomial) Sub(rhs polynomial) (polynomial, error) {
	maxLen := int(math.Max(float64(len(p)), float64(len(rhs))))
	diff := make([]curves.Scalar, maxLen)
	// Copy p's coefficients.
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		diff[i] = coeff.Clone()
	}
	// Subtract rhs; slots beyond len(p) are nil and become the negation.
	for i, coeff := range rhs {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		if diff[i] == nil {
			diff[i] = coeff.Neg()
		} else {
			diff[i] = diff[i].Sub(coeff)
		}
	}
	return diff, nil
}
// Mul multiplies two polynomials - p * rhs
// via schoolbook convolution; the result has degree deg(p)+deg(rhs).
func (p polynomial) Mul(rhs polynomial) (polynomial, error) {
	// Guard against empty operands: the original code would panic on
	// make(polynomial, -1) when both are empty, or on p[0] when only
	// p is empty.
	if len(p) == 0 || len(rhs) == 0 {
		return nil, fmt.Errorf("p and rhs must not be empty")
	}
	// Check for each coefficient that should not be nil
	for i, c := range p {
		if c == nil {
			return nil, fmt.Errorf("coefficient in p at %d is nil", i)
		}
	}
	for i, c := range rhs {
		if c == nil {
			return nil, fmt.Errorf("coefficient in rhs at %d is nil", i)
		}
	}
	m := len(p)
	n := len(rhs)
	// Initialize the product polynomial with zero scalars.
	prod := make(polynomial, m+n-1)
	for i := 0; i < len(prod); i++ {
		prod[i] = p[0].Zero()
	}
	// Multiply two polynomials term by term
	for i, cp := range p {
		for j, cr := range rhs {
			temp := cp.Mul(cr)
			prod[i+j] = prod[i+j].Add(temp)
		}
	}
	return prod, nil
}
// MulScalar computes p * rhs, where rhs is a scalar value
func (p polynomial) MulScalar(rhs curves.Scalar) (polynomial, error) {
	scaled := make(polynomial, len(p))
	for i := range p {
		if p[i] == nil {
			return nil, fmt.Errorf("coefficient at %d is nil", i)
		}
		scaled[i] = p[i].Mul(rhs)
	}
	return scaled, nil
}

View File

@ -0,0 +1,404 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// TestEvaluatePolyG1 evaluates the point polynomial with exponent
// coefficients (3, 2, 1) at x=1 and x=2, expecting 6*G and 11*G.
func TestEvaluatePolyG1(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	gen := curve.PointG1.Generator()
	poly := polynomialPoint{
		gen.Mul(curve.Scalar.New(3)),
		gen.Mul(curve.Scalar.New(2)),
		gen.Mul(curve.Scalar.New(1)),
	}
	cases := []struct {
		x    int
		want int
	}{
		{1, 6},
		{2, 11},
	}
	for _, tc := range cases {
		output, err := poly.evaluate(curve.Scalar.New(tc.x))
		require.NoError(t, err)
		require.NotNil(t, output)
		expected := gen.Mul(curve.Scalar.New(tc.want))
		require.Equal(t, output.ToAffineCompressed(), expected.ToAffineCompressed())
	}
}
// TestEvaluatePolyG1Error verifies evaluate rejects a nil coefficient.
func TestEvaluatePolyG1Error(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	gen := curve.PointG1.Generator()
	poly := polynomialPoint{
		nil,
		gen.Mul(curve.Scalar.New(2)),
		gen.Mul(curve.Scalar.New(1)),
	}
	_, err := poly.evaluate(curve.Scalar.New(1))
	require.Error(t, err)
}
// TestAddAssignPolyG1 exercises polynomialPoint.Add for equal-length
// operands, unequal-length operands (longer lhs), and an empty lhs with
// spare capacity, checking each resulting coefficient point.
func TestAddAssignPolyG1(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// Test polynomial with equal length
	poly1 := polynomialPoint{
		curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
	}
	poly2 := polynomialPoint{
		curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
	}
	output, err := poly1.Add(poly2)
	require.NoError(t, err)
	require.NotNil(t, output)
	result := polynomialPoint{
		curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
	}
	for i := 0; i < len(output); i++ {
		require.Equal(t, output[i].ToAffineCompressed(), result[i].ToAffineCompressed())
	}
	// Test polynomials with unequal length
	poly3 := polynomialPoint{
		curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
	}
	output2, err := poly1.Add(poly3)
	require.NoError(t, err)
	require.NotNil(t, output2)
	// The trailing coefficient comes from poly1 alone.
	result2 := polynomialPoint{
		curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
		curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
	}
	require.Equal(t, len(output2), len(result2))
	for i := 0; i < len(output2); i++ {
		require.Equal(t, output2[i].ToAffineCompressed(), result2[i].ToAffineCompressed())
	}
	// Test polynomial with Capacity
	poly4 := make(polynomialPoint, 0, 3)
	poly5, err := poly4.Add(poly1)
	require.NoError(t, err)
	require.Equal(t, len(poly5), len(poly1))
	for i := 0; i < len(poly5); i++ {
		require.Equal(t, poly5[i].ToAffineCompressed(), poly1[i].ToAffineCompressed())
	}
}
// TestAddAssignPolyG1Error verifies Add rejects a nil coefficient.
func TestAddAssignPolyG1Error(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	gen := curve.PointG1.Generator()
	lhs := polynomialPoint{
		nil,
		gen.Mul(curve.Scalar.New(2)),
		gen.Mul(curve.Scalar.New(1)),
	}
	rhs := polynomialPoint{
		gen.Mul(curve.Scalar.New(1)),
		gen.Mul(curve.Scalar.New(2)),
		gen.Mul(curve.Scalar.New(3)),
	}
	sum, err := lhs.Add(rhs)
	require.Error(t, err)
	require.Nil(t, sum)
}
// TestMulAssignPolyG1 scales a point polynomial by 3 and checks each
// resulting coefficient point.
func TestMulAssignPolyG1(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	gen := curve.PointG1.Generator()
	poly := polynomialPoint{
		gen.Mul(curve.Scalar.New(3)),
		gen.Mul(curve.Scalar.New(2)),
		gen.Mul(curve.Scalar.New(1)),
	}
	output, err := poly.Mul(curve.Scalar.New(3))
	require.NoError(t, err)
	require.NotNil(t, output)
	expected := polynomialPoint{
		gen.Mul(curve.Scalar.New(9)),
		gen.Mul(curve.Scalar.New(6)),
		gen.Mul(curve.Scalar.New(3)),
	}
	for i := range expected {
		require.Equal(t, output[i].ToAffineCompressed(), expected[i].ToAffineCompressed())
	}
}
// TestMulAssignPolyG1Error verifies Mul rejects a nil coefficient.
func TestMulAssignPolyG1Error(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	gen := curve.PointG1.Generator()
	poly := polynomialPoint{
		nil,
		gen.Mul(curve.Scalar.New(2)),
		gen.Mul(curve.Scalar.New(1)),
	}
	output, err := poly.Mul(curve.Scalar.New(3))
	require.Error(t, err)
	require.Nil(t, output)
}
// TestPushPoly checks that scalar polynomials grow correctly via append,
// both from an existing polynomial and from an empty one.
func TestPushPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	poly := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	four := curve.Scalar.New(4)
	five := curve.Scalar.New(5)
	grown := append(poly, four)
	require.Equal(t, grown[3], four)
	// Push one more
	grown = append(grown, five)
	require.Equal(t, grown[4], five)
	// Push to a new polynomial
	fresh := polynomial{}
	fresh = append(fresh, four)
	require.Equal(t, fresh[0], four)
	fresh = append(fresh, five)
	require.Equal(t, fresh[1], five)
}
// TestAddAssignPoly exercises polynomial.Add for equal-length and
// unequal-length operands, checking each resulting scalar coefficient.
func TestAddAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// Test polynomial with equal length
	poly1 := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	poly2 := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	output, err := poly1.Add(poly2)
	require.NoError(t, err)
	require.NotNil(t, output)
	result := []curves.Scalar{
		curve.Scalar.New(4),
		curve.Scalar.New(4),
		curve.Scalar.New(4),
	}
	for i := 0; i < len(output); i++ {
		require.Equal(t, output[i], result[i])
	}
	// Test polynomials with unequal length
	poly3 := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
	}
	output2, err := poly1.Add(poly3)
	require.NoError(t, err)
	require.NotNil(t, output2)
	// The trailing coefficient comes from poly1 alone.
	result2 := []curves.Scalar{
		curve.Scalar.New(4),
		curve.Scalar.New(4),
		curve.Scalar.New(1),
	}
	require.Equal(t, len(output2), len(result2))
	for i := 0; i < len(output2); i++ {
		require.Equal(t, output2[i], result2[i])
	}
}
// TestAddAssignPolyError verifies polynomial.Add rejects a nil coefficient.
func TestAddAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	lhs := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	sum, err := lhs.Add(rhs)
	require.Error(t, err)
	require.Nil(t, sum)
}
// TestSubAssignPoly exercises polynomial.Sub for equal-length operands and
// for a longer rhs (whose extra coefficients are negated into the result).
func TestSubAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// Test polynomial with equal length
	poly1 := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	poly2 := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	output, err := poly1.Sub(poly2)
	require.NoError(t, err)
	require.NotNil(t, output)
	result := []curves.Scalar{
		curve.Scalar.New(2),
		curve.Scalar.New(0),
		curve.Scalar.New(-2),
	}
	for i := 0; i < len(output); i++ {
		require.Equal(t, output[i].Bytes(), result[i].Bytes())
	}
	// Test polynomials with unequal length
	poly3 := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
		curve.Scalar.New(4),
	}
	output2, err := poly1.Sub(poly3)
	require.NoError(t, err)
	require.NotNil(t, output2)
	// The trailing coefficient is the negation of poly3's extra term.
	result2 := []curves.Scalar{
		curve.Scalar.New(2),
		curve.Scalar.New(0),
		curve.Scalar.New(-2),
		curve.Scalar.New(-4),
	}
	require.Equal(t, len(output2), len(result2))
	for i := 0; i < len(output2); i++ {
		require.Equal(t, output2[i].Bytes(), result2[i].Bytes())
	}
}
// TestSubAssignPolyError verifies polynomial.Sub rejects a nil coefficient.
func TestSubAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	lhs := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	diff, err := lhs.Sub(rhs)
	require.Error(t, err)
	require.Nil(t, diff)
}
// TestMulAssignPoly exercises polynomial.Mul (convolution) for equal-length
// and unequal-length operands, checking every product coefficient.
func TestMulAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// Test polynomial with equal length
	poly1 := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	poly2 := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	output, err := poly1.Mul(poly2)
	require.NoError(t, err)
	require.NotNil(t, output)
	// (3+2x+x^2)(1+2x+3x^2) = 3+8x+14x^2+8x^3+3x^4
	result := []curves.Scalar{
		curve.Scalar.New(3),
		curve.Scalar.New(8),
		curve.Scalar.New(14),
		curve.Scalar.New(8),
		curve.Scalar.New(3),
	}
	for i := 0; i < len(result); i++ {
		require.Equal(t, output[i].Bytes(), result[i].Bytes())
	}
	// Test polynomials with unequal length
	poly3 := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
	}
	output2, err := poly1.Mul(poly3)
	require.NoError(t, err)
	require.NotNil(t, output2)
	// (3+2x+x^2)(1+2x) = 3+8x+5x^2+2x^3
	result2 := []curves.Scalar{
		curve.Scalar.New(3),
		curve.Scalar.New(8),
		curve.Scalar.New(5),
		curve.Scalar.New(2),
	}
	require.Equal(t, len(output2), 4)
	for i := 0; i < len(output2); i++ {
		require.Equal(t, output2[i].Bytes(), result2[i].Bytes())
	}
}
// TestMulAssignPolyError verifies polynomial.Mul rejects a nil coefficient.
func TestMulAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	lhs := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	product, err := lhs.Mul(rhs)
	require.Error(t, err)
	require.Nil(t, product)
}
// TestMulValueAssignPoly scales a scalar polynomial by 3 and checks each
// resulting coefficient.
func TestMulValueAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	poly := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	output, err := poly.MulScalar(curve.Scalar.New(3))
	require.NoError(t, err)
	require.NotNil(t, output)
	expected := []curves.Scalar{
		curve.Scalar.New(9),
		curve.Scalar.New(6),
		curve.Scalar.New(3),
	}
	for i := range expected {
		require.Equal(t, output[i].Bytes(), expected[i].Bytes())
	}
}
// TestMulValueAssignPolyError verifies MulScalar rejects a nil coefficient.
func TestMulValueAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	poly := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	output, err := poly.MulScalar(curve.Scalar.New(3))
	require.Error(t, err)
	require.Nil(t, output)
}

518
pkg/crypto/accumulator/proof.go Executable file
View File

@ -0,0 +1,518 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"bytes"
crand "crypto/rand"
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// proofParamsMarshal is the BARE wire form of ProofParams: the three
// compressed generator points plus the curve name used to decode them.
type proofParamsMarshal struct {
X []byte `bare:"x"`
Y []byte `bare:"y"`
Z []byte `bare:"z"`
Curve string `bare:"curve"`
}
// ProofParams contains three distinct public generators of G1 - X, Y and Z.
type ProofParams struct {
x, y, z curves.Point
}
// New derives the generators X, Y and Z deterministically from the public
// key and caller-provided entropy by hashing to the curve.
func (p *ProofParams) New(curve *curves.PairingCurve, pk *PublicKey, entropy []byte) (*ProofParams, error) {
	pkBytes, err := pk.MarshalBinary()
	if err != nil {
		return nil, err
	}
	// Hash input is a 32-byte prefix || entropy || pk bytes; the first
	// prefix byte is varied (0xFF/0xFE/0xFD) to separate the generators.
	data := make([]byte, 0, 32+len(entropy)+len(pkBytes))
	data = append(data, bytes.Repeat([]byte{0xFF}, 32)...)
	data = append(data, entropy...)
	data = append(data, pkBytes...)
	p.z = curve.Scalar.Point().Hash(data)
	data[0] = 0xFE
	p.y = curve.Scalar.Point().Hash(data)
	data[0] = 0xFD
	p.x = curve.Scalar.Point().Hash(data)
	return p, nil
}
// MarshalBinary converts ProofParams to bytes
func (p *ProofParams) MarshalBinary() ([]byte, error) {
	if p.x == nil || p.y == nil || p.z == nil {
		return nil, fmt.Errorf("some value x, y, or z is nil")
	}
	return bare.Marshal(&proofParamsMarshal{
		X:     p.x.ToAffineCompressed(),
		Y:     p.y.ToAffineCompressed(),
		Z:     p.z.ToAffineCompressed(),
		Curve: p.x.CurveName(),
	})
}
// UnmarshalBinary converts bytes to ProofParams
func (p *ProofParams) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	tv := new(proofParamsMarshal)
	if err := bare.Unmarshal(data, tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// Decode all three points before mutating p, so a mid-stream decode
	// failure leaves the receiver untouched.
	points := make([]curves.Point, 3)
	for i, raw := range [][]byte{tv.X, tv.Y, tv.Z} {
		pt, err := curve.NewIdentityPoint().FromAffineCompressed(raw)
		if err != nil {
			return err
		}
		points[i] = pt
	}
	p.x, p.y, p.z = points[0], points[1], points[2]
	return nil
}
// MembershipProofCommitting contains value computed in Proof of knowledge and
// Blinding phases as described in section 7 of https://eprint.iacr.org/2020/777.pdf
type MembershipProofCommitting struct {
// Blinded commitments: E_C, T_σ, T_ρ and the derived δ_σ, δ_ρ.
eC curves.Point
tSigma curves.Point
tRho curves.Point
deltaSigma curves.Scalar
deltaRho curves.Scalar
// Random scalars: r_y (blindingFactor), r_σ, r_ρ, r_δσ, r_δρ, σ, ρ.
blindingFactor curves.Scalar
rSigma curves.Scalar
rRho curves.Scalar
rDeltaSigma curves.Scalar
rDeltaRho curves.Scalar
sigma curves.Scalar
rho curves.Scalar
// Schnorr commitment values fed into the Fiat-Shamir challenge.
capRSigma curves.Point
capRRho curves.Point
capRDeltaSigma curves.Point
capRDeltaRho curves.Point
capRE curves.Scalar
// Inputs retained for challenge generation: accumulator value V, the
// witness's y, and the proof generators X, Y, Z.
accumulator curves.Point
witnessValue curves.Scalar
xG1 curves.Point
yG1 curves.Point
zG1 curves.Point
}
// New initiates values of MembershipProofCommitting: it blinds the witness
// commitment and prepares all Schnorr commitments needed for the
// Fiat-Shamir challenge (section 7 of https://eprint.iacr.org/2020/777.pdf).
func (mpc *MembershipProofCommitting) New(
witness *MembershipWitness,
acc *Accumulator,
pp *ProofParams,
pk *PublicKey,
) (*MembershipProofCommitting, error) {
// Randomly select σ, ρ
sigma := witness.y.Random(crand.Reader)
rho := witness.y.Random(crand.Reader)
// E_C = C + (σ + ρ)Z
t := sigma
t = t.Add(rho)
eC := pp.z
eC = eC.Mul(t)
eC = eC.Add(witness.c)
// T_σ = σX
tSigma := pp.x
tSigma = tSigma.Mul(sigma)
// T_ρ = ρY
tRho := pp.y
tRho = tRho.Mul(rho)
// δ_σ = yσ
deltaSigma := witness.y
deltaSigma = deltaSigma.Mul(sigma)
// δ_ρ = yρ
deltaRho := witness.y
deltaRho = deltaRho.Mul(rho)
// Randomly pick r_σ,r_ρ,r_δσ,r_δρ
rY := witness.y.Random(crand.Reader)
rSigma := witness.y.Random(crand.Reader)
rRho := witness.y.Random(crand.Reader)
rDeltaSigma := witness.y.Random(crand.Reader)
rDeltaRho := witness.y.Random(crand.Reader)
// R_σ = r_σ X
capRSigma := pp.x
capRSigma = capRSigma.Mul(rSigma)
// R_ρ = r_ρ Y
capRRho := pp.y
capRRho = capRRho.Mul(rRho)
// R_δσ = r_y T_σ - r_δσ X
negX := pp.x
negX = negX.Neg()
capRDeltaSigma := tSigma.Mul(rY)
capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(rDeltaSigma))
// R_δρ = r_y T_ρ - r_δρ Y
negY := pp.y
negY = negY.Neg()
capRDeltaRho := tRho.Mul(rY)
capRDeltaRho = capRDeltaRho.Add(negY.Mul(rDeltaRho))
// P~
g2 := pk.value.Generator()
// -r_δσ - r_δρ
exp := rDeltaSigma
exp = exp.Add(rDeltaRho)
exp = exp.Neg()
// -r_σ - r_ρ
exp2 := rSigma
exp2 = exp2.Add(rRho)
exp2 = exp2.Neg()
// rY * eC
rYeC := eC.Mul(rY)
// (-r_δσ - r_δρ)*Z
expZ := pp.z.Mul(exp)
// (-r_σ - r_ρ)*Z
exp2Z := pp.z.Mul(exp2)
// Prepare
rYeCPrep, ok := rYeC.(curves.PairingPoint)
if !ok {
return nil, errors.New("incorrect type conversion")
}
g2Prep, ok := g2.(curves.PairingPoint)
if !ok {
return nil, errors.New("incorrect type conversion")
}
expZPrep, ok := expZ.(curves.PairingPoint)
if !ok {
return nil, errors.New("incorrect type conversion")
}
exp2ZPrep, ok := exp2Z.(curves.PairingPoint)
if !ok {
return nil, errors.New("incorrect type conversion")
}
pkPrep := pk.value
// Pairing
capRE := g2Prep.MultiPairing(rYeCPrep, g2Prep, expZPrep, g2Prep, exp2ZPrep, pkPrep)
// NOTE: positional struct literal — order must match the field order of
// MembershipProofCommitting exactly; rY lands in blindingFactor.
return &MembershipProofCommitting{
eC,
tSigma,
tRho,
deltaSigma,
deltaRho,
rY,
rSigma,
rRho,
rDeltaSigma,
rDeltaRho,
sigma,
rho,
capRSigma,
capRRho,
capRDeltaSigma,
capRDeltaRho,
capRE,
acc.value,
witness.y,
pp.x,
pp.y,
pp.z,
}, nil
}
// GetChallengeBytes returns the bytes that are hashed to produce the
// Fiat-Shamir challenge:
// V || Ec || T_sigma || T_rho || R_E || R_sigma || R_rho || R_delta_sigma || R_delta_rho
func (mpc MembershipProofCommitting) GetChallengeBytes() []byte {
	res := mpc.accumulator.ToAffineCompressed()
	for _, pt := range []curves.Point{mpc.eC, mpc.tSigma, mpc.tRho} {
		res = append(res, pt.ToAffineCompressed()...)
	}
	res = append(res, mpc.capRE.Bytes()...)
	for _, pt := range []curves.Point{mpc.capRSigma, mpc.capRRho, mpc.capRDeltaSigma, mpc.capRDeltaRho} {
		res = append(res, pt.ToAffineCompressed()...)
	}
	return res
}
// GenProof computes the s values for Fiat-Shamir and returns the actual
// proof to be sent to the verifier given the challenge c.
func (mpc *MembershipProofCommitting) GenProof(c curves.Scalar) *MembershipProof {
	return &MembershipProof{
		eC:     mpc.eC,
		tSigma: mpc.tSigma,
		tRho:   mpc.tRho,
		// s_σ = r_σ + c*σ
		sSigma: schnorr(mpc.rSigma, mpc.sigma, c),
		// s_ρ = r_ρ + c*ρ
		sRho: schnorr(mpc.rRho, mpc.rho, c),
		// s_δσ = r_δσ + c*δ_σ
		sDeltaSigma: schnorr(mpc.rDeltaSigma, mpc.deltaSigma, c),
		// s_δρ = r_δρ + c*δ_ρ
		sDeltaRho: schnorr(mpc.rDeltaRho, mpc.deltaRho, c),
		// s_y = r_y + c*y
		sY: schnorr(mpc.blindingFactor, mpc.witnessValue, c),
	}
}
// schnorr computes the standard Schnorr response r + v*challenge.
func schnorr(r, v, challenge curves.Scalar) curves.Scalar {
	return v.Mul(challenge).Add(r)
}
// membershipProofMarshal is the BARE wire form of MembershipProof: points
// compressed, scalars as raw bytes, plus the curve name used to decode.
type membershipProofMarshal struct {
EC []byte `bare:"e_c"`
TSigma []byte `bare:"t_sigma"`
TRho []byte `bare:"t_rho"`
SSigma []byte `bare:"s_sigma"`
SRho []byte `bare:"s_rho"`
SDeltaSigma []byte `bare:"s_delta_sigma"`
SDeltaRho []byte `bare:"s_delta_rho"`
SY []byte `bare:"s_y"`
Curve string `bare:"curve"`
}
// MembershipProof contains values in the proof to be verified:
// the blinded commitments E_C, T_σ, T_ρ and the Schnorr responses
// s_σ, s_ρ, s_δσ, s_δρ, s_y.
type MembershipProof struct {
eC curves.Point
tSigma curves.Point
tRho curves.Point
sSigma curves.Scalar
sRho curves.Scalar
sDeltaSigma curves.Scalar
sDeltaRho curves.Scalar
sY curves.Scalar
}
// Finalize computes values in the proof to be verified: it reconstructs the
// Schnorr commitments from the responses and the challenge so the verifier
// can re-derive the Fiat-Shamir challenge and compare.
func (mp *MembershipProof) Finalize(acc *Accumulator, pp *ProofParams, pk *PublicKey, challenge curves.Scalar) (*MembershipProofFinal, error) {
// R_σ = s_σ X - c T_σ
negTSigma := mp.tSigma
negTSigma = negTSigma.Neg()
capRSigma := pp.x.Mul(mp.sSigma)
capRSigma = capRSigma.Add(negTSigma.Mul(challenge))
// R_ρ = s_ρ Y - c T_ρ
negTRho := mp.tRho
negTRho = negTRho.Neg()
capRRho := pp.y.Mul(mp.sRho)
capRRho = capRRho.Add(negTRho.Mul(challenge))
// R_δσ = s_y T_σ - s_δσ X
negX := pp.x
negX = negX.Neg()
capRDeltaSigma := mp.tSigma.Mul(mp.sY)
capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(mp.sDeltaSigma))
// R_δρ = s_y T_ρ - s_δρ Y
negY := pp.y
negY = negY.Neg()
capRDeltaRho := mp.tRho.Mul(mp.sY)
capRDeltaRho = capRDeltaRho.Add(negY.Mul(mp.sDeltaRho))
// tildeP
g2 := pk.value.Generator()
// Compute capRE, the pairing
// E_c * s_y
eCsY := mp.eC.Mul(mp.sY)
// (-s_delta_sigma - s_delta_rho) * Z
exp := mp.sDeltaSigma
exp = exp.Add(mp.sDeltaRho)
exp = exp.Neg()
expZ := pp.z.Mul(exp)
// (-c) * V
exp = challenge.Neg()
expV := acc.value.Mul(exp)
// E_c * s_y + (-s_delta_sigma - s_delta_rho) * Z + (-c) * V
lhs := eCsY.Add(expZ).Add(expV)
// (-s_sigma - s_rho) * Z
exp = mp.sSigma
exp = exp.Add(mp.sRho)
exp = exp.Neg()
expZ2 := pp.z.Mul(exp)
// E_c * c
cEc := mp.eC.Mul(challenge)
// (-s_sigma - s_rho) * Z + E_c * c
rhs := cEc.Add(expZ2)
// Prepare the pairing inputs; every operand must be a PairingPoint.
lhsPrep, ok := lhs.(curves.PairingPoint)
if !ok {
return nil, errors.New("incorrect type conversion")
}
g2Prep, ok := g2.(curves.PairingPoint)
if !ok {
return nil, errors.New("incorrect type conversion")
}
rhsPrep, ok := rhs.(curves.PairingPoint)
if !ok {
return nil, errors.New("incorrect type conversion")
}
pkPrep := pk.value
// capRE = e(lhs, tildeP) * e(rhs, pk)
capRE := g2Prep.MultiPairing(lhsPrep, g2Prep, rhsPrep, pkPrep)
return &MembershipProofFinal{
acc.value,
mp.eC,
mp.tSigma,
mp.tRho,
capRE,
capRSigma,
capRRho,
capRDeltaSigma,
capRDeltaRho,
}, nil
}
// MarshalBinary converts MembershipProof to bytes
func (mp MembershipProof) MarshalBinary() ([]byte, error) {
	return bare.Marshal(&membershipProofMarshal{
		EC:          mp.eC.ToAffineCompressed(),
		TSigma:      mp.tSigma.ToAffineCompressed(),
		TRho:        mp.tRho.ToAffineCompressed(),
		SSigma:      mp.sSigma.Bytes(),
		SRho:        mp.sRho.Bytes(),
		SDeltaSigma: mp.sDeltaSigma.Bytes(),
		SDeltaRho:   mp.sDeltaRho.Bytes(),
		SY:          mp.sY.Bytes(),
		Curve:       mp.eC.CurveName(),
	})
}
// UnmarshalBinary converts bytes to MembershipProof. The receiver is only
// mutated after every point and scalar has decoded successfully.
func (mp *MembershipProof) UnmarshalBinary(data []byte) error {
if data == nil {
return fmt.Errorf("expected non-zero byte sequence")
}
tv := new(membershipProofMarshal)
err := bare.Unmarshal(data, tv)
if err != nil {
return err
}
// The curve name in the payload selects how points/scalars are decoded.
curve := curves.GetCurveByName(tv.Curve)
if curve == nil {
return fmt.Errorf("invalid curve")
}
eC, err := curve.NewIdentityPoint().FromAffineCompressed(tv.EC)
if err != nil {
return err
}
tSigma, err := curve.NewIdentityPoint().FromAffineCompressed(tv.TSigma)
if err != nil {
return err
}
tRho, err := curve.NewIdentityPoint().FromAffineCompressed(tv.TRho)
if err != nil {
return err
}
sSigma, err := curve.NewScalar().SetBytes(tv.SSigma)
if err != nil {
return err
}
sRho, err := curve.NewScalar().SetBytes(tv.SRho)
if err != nil {
return err
}
sDeltaSigma, err := curve.NewScalar().SetBytes(tv.SDeltaSigma)
if err != nil {
return err
}
sDeltaRho, err := curve.NewScalar().SetBytes(tv.SDeltaRho)
if err != nil {
return err
}
sY, err := curve.NewScalar().SetBytes(tv.SY)
if err != nil {
return err
}
mp.eC = eC
mp.tSigma = tSigma
mp.tRho = tRho
mp.sSigma = sSigma
mp.sRho = sRho
mp.sDeltaSigma = sDeltaSigma
mp.sDeltaRho = sDeltaRho
mp.sY = sY
return nil
}
// MembershipProofFinal contains values that are input to Fiat-Shamir Heuristic:
// the accumulator value V, the blinded commitments, and the reconstructed
// Schnorr commitment values.
type MembershipProofFinal struct {
accumulator curves.Point
eC curves.Point
tSigma curves.Point
tRho curves.Point
capRE curves.Scalar
capRSigma curves.Point
capRRho curves.Point
capRDeltaSigma curves.Point
capRDeltaRho curves.Point
}
// GetChallenge computes the Fiat-Shamir challenge over the concatenation of
// every value in MembershipProofFinal, in the same order used by
// MembershipProofCommitting.GetChallengeBytes.
func (m MembershipProofFinal) GetChallenge(curve *curves.PairingCurve) curves.Scalar {
	res := m.accumulator.ToAffineCompressed()
	for _, pt := range []curves.Point{m.eC, m.tSigma, m.tRho} {
		res = append(res, pt.ToAffineCompressed()...)
	}
	res = append(res, m.capRE.Bytes()...)
	for _, pt := range []curves.Point{m.capRSigma, m.capRRho, m.capRDeltaSigma, m.capRDeltaRho} {
		res = append(res, pt.ToAffineCompressed()...)
	}
	return curve.Scalar.Hash(res)
}

View File

@ -0,0 +1,182 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// TestProofParamsMarshal round-trips ProofParams through
// MarshalBinary/UnmarshalBinary and checks the generators survive.
func TestProofParamsMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	params, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, params.x)
	require.NotNil(t, params.y)
	require.NotNil(t, params.z)
	encoded, err := params.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, encoded)
	// Start from generator placeholders so the decode must overwrite them.
	decoded := &ProofParams{
		curve.PointG1.Generator(),
		curve.PointG1.Generator(),
		curve.PointG1.Generator(),
	}
	err = decoded.UnmarshalBinary(encoded)
	require.NoError(t, err)
	require.True(t, params.x.Equal(decoded.x))
	require.True(t, params.y.Equal(decoded.y))
	require.True(t, params.z.Equal(decoded.z))
}
// TestMembershipProof runs the full membership-proof lifecycle: build an
// accumulator, derive a witness, commit, challenge, prove, finalize, and
// check the verifier re-derives the same challenge — then repeats the whole
// flow after a batch update of the accumulator and witness.
func TestMembershipProof(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// Initiate a new accumulator
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	// Initiate a new membership witness for value elements[3]
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	// Create proof parameters, which contains randomly sampled G1 points X, Y, Z, K
	params, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, params.x)
	require.NotNil(t, params.y)
	require.NotNil(t, params.z)
	mpc, err := new(MembershipProofCommitting).New(wit, acc, params, pk)
	require.NoError(t, err)
	testMPC(t, mpc)
	challenge := curve.Scalar.Hash(mpc.GetChallengeBytes())
	require.NotNil(t, challenge)
	proof := mpc.GenProof(challenge)
	require.NotNil(t, proof)
	testProof(t, proof)
	finalProof, err := proof.Finalize(acc, params, pk, challenge)
	require.NoError(t, err)
	require.NotNil(t, finalProof)
	testFinalProof(t, finalProof)
	// A matching challenge means the reconstructed commitments are correct.
	challenge2 := finalProof.GetChallenge(curve)
	require.Equal(t, challenge, challenge2)
	// Check we can still have a valid proof even if accumulator and witness are updated
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	additions := data[0:2]
	deletions := data[2:5]
	_, coefficients, err := acc.Update(sk, additions, deletions)
	require.NoError(t, err)
	require.NotNil(t, coefficients)
	_, err = wit.BatchUpdate(additions, deletions, coefficients)
	require.NoError(t, err)
	newParams, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, newParams.x)
	require.NotNil(t, newParams.y)
	require.NotNil(t, newParams.z)
	newMPC, err := new(MembershipProofCommitting).New(wit, acc, newParams, pk)
	require.NoError(t, err)
	testMPC(t, newMPC)
	challenge3 := curve.Scalar.Hash(newMPC.GetChallengeBytes())
	require.NotNil(t, challenge3)
	newProof := newMPC.GenProof(challenge3)
	require.NotNil(t, newProof)
	testProof(t, newProof)
	newFinalProof, err := newProof.Finalize(acc, newParams, pk, challenge3)
	require.NoError(t, err)
	require.NotNil(t, newFinalProof)
	testFinalProof(t, newFinalProof)
	challenge4 := newFinalProof.GetChallenge(curve)
	require.Equal(t, challenge3, challenge4)
}
// testMPC asserts every field of a MembershipProofCommitting is populated.
func testMPC(t *testing.T, mpc *MembershipProofCommitting) {
	fields := []interface{}{
		mpc.eC, mpc.tSigma, mpc.tRho,
		mpc.deltaSigma, mpc.deltaRho, mpc.blindingFactor,
		mpc.rSigma, mpc.rRho, mpc.rDeltaSigma, mpc.rDeltaRho,
		mpc.sigma, mpc.rho,
		mpc.capRSigma, mpc.capRRho, mpc.capRDeltaSigma, mpc.capRDeltaRho,
		mpc.capRE,
		mpc.accumulator, mpc.witnessValue,
		mpc.xG1, mpc.yG1, mpc.zG1,
	}
	for _, field := range fields {
		require.NotNil(t, field)
	}
}
// testProof asserts every field of a MembershipProof is populated.
func testProof(t *testing.T, proof *MembershipProof) {
	fields := []interface{}{
		proof.eC, proof.tSigma, proof.tRho,
		proof.sSigma, proof.sRho, proof.sDeltaSigma, proof.sDeltaRho,
		proof.sY,
	}
	for _, field := range fields {
		require.NotNil(t, field)
	}
}
// testFinalProof asserts every field of a MembershipProofFinal is populated.
func testFinalProof(t *testing.T, finalProof *MembershipProofFinal) {
	fields := []interface{}{
		finalProof.accumulator,
		finalProof.eC, finalProof.tSigma, finalProof.tRho,
		finalProof.capRE,
		finalProof.capRSigma, finalProof.capRRho,
		finalProof.capRDeltaSigma, finalProof.capRDeltaRho,
	}
	for _, field := range fields {
		require.NotNil(t, field)
	}
}

375
pkg/crypto/accumulator/witness.go Executable file
View File

@ -0,0 +1,375 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// MembershipWitness contains the witness c and the value y respect to the accumulator state.
type MembershipWitness struct {
c curves.Point  // witness point for y against the accumulator
y curves.Scalar // the accumulated element this witness attests to
}
// New creates a new membership witness for y against the accumulator acc,
// using the secret key sk.
func (mw *MembershipWitness) New(y Element, acc *Accumulator, sk *SecretKey) (*MembershipWitness, error) {
	switch {
	case acc.value == nil || acc.value.IsIdentity():
		return nil, fmt.Errorf("value of accumulator should not be nil")
	case sk.value == nil || sk.value.IsZero():
		return nil, fmt.Errorf("secret key should not be nil")
	case y == nil || y.IsZero():
		return nil, fmt.Errorf("y should not be nil")
	}
	// The witness is the accumulator value with y removed.
	reduced := &Accumulator{acc.value}
	if _, err := reduced.Remove(sk, y); err != nil {
		return nil, err
	}
	mw.c = reduced.value
	mw.y = y.Add(y.Zero()) // clone y so the caller's scalar is not shared
	return mw, nil
}
// Verify the MembershipWitness mw is a valid witness as per section 4 in
// <https://eprint.iacr.org/2020/777>
func (mw MembershipWitness) Verify(pk *PublicKey, acc *Accumulator) error {
if mw.c == nil || mw.y == nil || mw.c.IsIdentity() || mw.y.IsZero() {
return fmt.Errorf("c and y should not be nil")
}
if pk.value == nil || pk.value.IsIdentity() {
return fmt.Errorf("invalid public key")
}
if acc.value == nil || acc.value.IsIdentity() {
return fmt.Errorf("accumulator value should not be nil")
}
// tildeP, the G2 generator of the public key's group
g2, ok := pk.value.Generator().(curves.PairingPoint)
if !ok {
return errors.New("incorrect type conversion")
}
// y*tildeP + tildeQ, tildeP is a G2 generator.
p, ok := g2.Mul(mw.y).Add(pk.value).(curves.PairingPoint)
if !ok {
return errors.New("incorrect type conversion")
}
// Prepare the pairing operands.
witness, ok := mw.c.(curves.PairingPoint)
if !ok {
return errors.New("incorrect type conversion")
}
v, ok := acc.value.Neg().(curves.PairingPoint)
if !ok {
return errors.New("incorrect type conversion")
}
// Check e(witness, y*tildeP + tildeQ) * e(-acc, tildeP) == Identity
result := p.MultiPairing(witness, p, v, g2)
if !result.IsOne() {
return fmt.Errorf("invalid result")
}
return nil
}
// ApplyDelta returns C' = dA(y)/dD(y)*C + 1/dD(y) * <Gamma_y, Omega>
// according to the witness update protocol described in section 4 of
// https://eprint.iacr.org/2020/777.pdf. The witness is updated in place
// and also returned.
func (mw *MembershipWitness) ApplyDelta(delta *Delta) (*MembershipWitness, error) {
	if mw.c == nil || mw.y == nil || delta == nil {
		return nil, fmt.Errorf("y, c or delta should not be nil")
	}
	// delta.d carries dA(y)/dD(y) and delta.p carries 1/dD(y)*<Gamma_y, Omega>,
	// so the update collapses to one scalar multiplication plus one addition.
	mw.c = mw.c.Mul(delta.d).Add(delta.p)
	return mw, nil
}
// BatchUpdate performs a batch update of the witness as described in section 4
// of https://eprint.iacr.org/2020/777.pdf, folding the given additions,
// deletions, and update coefficients into the witness in a single step.
func (mw *MembershipWitness) BatchUpdate(additions []Element, deletions []Element, coefficients []Coefficient) (*MembershipWitness, error) {
	delta, err := evaluateDelta(mw.y, additions, deletions, coefficients)
	if err != nil {
		return nil, err
	}
	mw, err = mw.ApplyDelta(delta)
	if err != nil {
		// Wrap instead of discarding the underlying cause (previously a
		// bare "applyDelta fails" with no context).
		return nil, fmt.Errorf("applyDelta fails: %w", err)
	}
	return mw, nil
}
// MultiBatchUpdate performs a multi-batch update using one epoch per entry of
// A (additions), D (deletions), and C (coefficients), as described in
// section 4.2 of https://eprint.iacr.org/2020/777.pdf.
func (mw *MembershipWitness) MultiBatchUpdate(A [][]Element, D [][]Element, C [][]Coefficient) (*MembershipWitness, error) {
	delta, err := evaluateDeltas(mw.y, A, D, C)
	if err != nil {
		// Wrap instead of discarding the underlying cause (previously a
		// bare "evaluateDeltas fails" with no context).
		return nil, fmt.Errorf("evaluateDeltas fails: %w", err)
	}
	mw, err = mw.ApplyDelta(delta)
	if err != nil {
		return nil, err
	}
	return mw, nil
}
// MarshalBinary converts a membership witness to bytes: the compressed point
// c followed by the scalar y, bare-encoded together with the curve name so
// UnmarshalBinary can reconstruct it.
func (mw MembershipWitness) MarshalBinary() ([]byte, error) {
	if mw.c == nil || mw.y == nil {
		return nil, fmt.Errorf("c and y value should not be nil")
	}
	payload := mw.c.ToAffineCompressed()
	payload = append(payload, mw.y.Bytes()...)
	return bare.Marshal(&structMarshal{
		Value: payload,
		Curve: mw.c.CurveName(),
	})
}
// UnmarshalBinary converts bytes produced by MarshalBinary back into a
// MembershipWitness, validating the curve name and payload length.
func (mw *MembershipWitness) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("input data should not be nil")
	}
	tv := new(structMarshal)
	if err := bare.Unmarshal(data, tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// The payload must hold exactly one compressed point and one scalar.
	ptLength := len(curve.Point.ToAffineCompressed())
	scLength := len(curve.Scalar.Bytes())
	if len(tv.Value) != ptLength+scLength {
		return fmt.Errorf("invalid byte sequence")
	}
	cValue, err := curve.Point.FromAffineCompressed(tv.Value[:ptLength])
	if err != nil {
		return err
	}
	yValue, err := curve.Scalar.SetBytes(tv.Value[ptLength:])
	if err != nil {
		return err
	}
	mw.c, mw.y = cValue, yValue
	return nil
}
// Delta contains values d and p, where d should be the division dA(y)/dD(y) on some value y
// and p should be equal to 1/dD(y) * <Gamma_y, Omega>.
type Delta struct {
	d curves.Scalar // dA(y)/dD(y)
	p curves.Point  // 1/dD(y) * <Gamma_y, Omega>
}
// MarshalBinary converts Delta into bytes: the compressed point p followed by
// the scalar d, bare-encoded together with the curve name.
func (d *Delta) MarshalBinary() ([]byte, error) {
	if d.d == nil || d.p == nil {
		return nil, fmt.Errorf("d and p should not be nil")
	}
	payload := d.p.ToAffineCompressed()
	payload = append(payload, d.d.Bytes()...)
	return bare.Marshal(&structMarshal{
		Value: payload,
		Curve: d.p.CurveName(),
	})
}
// UnmarshalBinary converts bytes produced by MarshalBinary back into a Delta,
// validating the curve name and payload length.
func (d *Delta) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	tv := new(structMarshal)
	if err := bare.Unmarshal(data, tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// The payload must hold exactly one compressed point and one scalar.
	ptLength := len(curve.Point.ToAffineCompressed())
	scLength := len(curve.Scalar.Bytes())
	if len(tv.Value) != ptLength+scLength {
		return fmt.Errorf("invalid byte sequence")
	}
	pValue, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Value[:ptLength])
	if err != nil {
		return err
	}
	dValue, err := curve.NewScalar().SetBytes(tv.Value[ptLength:])
	if err != nil {
		return err
	}
	d.d, d.p = dValue, pValue
	return nil
}
// evaluateDeltas computes the values used for a membership witness multi-batch
// update with epochs, as described in section 4.2, page 11 of
// https://eprint.iacr.org/2020/777.pdf. A, D, and C hold per-epoch additions,
// deletions, and update coefficients, and must all have the same length.
func evaluateDeltas(y Element, A [][]Element, D [][]Element, C [][]Coefficient) (*Delta, error) {
	if len(A) != len(D) || len(A) != len(C) {
		return nil, fmt.Errorf("a, d, c should have same length")
	}
	one := y.One()
	size := len(A)
	// aa[i] = dA_i(y) = ∏ (yA_j - y) over epoch i's additions
	aa := make([]curves.Scalar, 0)
	// dd[i] = dD_i(y) = ∏ (yD_j - y) over epoch i's deletions
	dd := make([]curves.Scalar, 0)
	a := one
	d := one
	// Accumulate the running products dA_{1->size}(y) and dD_{1->size}(y)
	// while caching each epoch's individual factor for the second pass.
	for i := 0; i < size; i++ {
		adds := A[i]
		dels := D[i]
		// ta = dAs(y) for this epoch's additions
		ta, err := dad(adds, y)
		if err != nil {
			return nil, fmt.Errorf("dad on additions fails")
		}
		// td = dDs(y) for this epoch's deletions
		td, err := dad(dels, y)
		if err != nil {
			return nil, fmt.Errorf("dad on deletions fails")
		}
		a = a.Mul(ta)
		d = d.Mul(td)
		aa = append(aa, ta)
		dd = append(dd, td)
	}
	// If the inversion fails, y was one of the deleted values and no
	// valid witness update exists.
	d, err := d.Invert()
	if err != nil {
		return nil, fmt.Errorf("no inverse exists")
	}
	// Accumulate <Gamma_y, Omega> as a polynomial in y:
	// Ωi->j+1 = ∑ 1..t (dAt * dDt-1) · Ω
	p := make(polynomialPoint, 0, size)
	for i := 0; i < size; i++ {
		// ddh = dD_{1->i-1}(y): product of earlier epochs' deletion factors.
		ddh := one
		for h := 0; h < i; h++ {
			ddh = ddh.Mul(dd[h])
		}
		// dak = dA_{i+1->size}(y): product of later epochs' addition factors.
		dak := one
		for k := i + 1; k < size; k++ {
			dak = dak.Mul(aa[k])
		}
		// dDi->t-1(y) * dAt->j(y)
		dak = dak.Mul(ddh)
		// Copy epoch i's coefficient vector into a polynomial.
		pp := make(polynomialPoint, len(C[i]))
		for j := 0; j < len(pp); j++ {
			pp[j] = C[i][j]
		}
		// Scale by dDi->t-1(y) * dAt->j(y) and fold into the running sum.
		pp, err := pp.Mul(dak)
		if err != nil {
			return nil, fmt.Errorf("pp.Mul fails")
		}
		p, err = p.Add(pp)
		if err != nil {
			return nil, fmt.Errorf("pp.Add fails")
		}
	}
	// a = dA_{1->size}(y) / dD_{1->size}(y)
	a = a.Mul(d)
	// Evaluate the accumulated polynomial Ωi->j at y.
	v, err := p.evaluate(y)
	if err != nil {
		return nil, fmt.Errorf("p.evaluate fails")
	}
	// Scale by 1/dD_{1->size}(y).
	v = v.Mul(d)
	return &Delta{d: a, p: v}, nil
}
// evaluateDelta computes the update values used for a single membership
// witness batch update, as described in section 4.1 of
// https://eprint.iacr.org/2020/777.pdf: d = dA(y)/dD(y) and
// p = 1/dD(y) * <Gamma_y, Omega>.
//
// Cleanup: the redundant `var err error` before the first `:=` declaration
// was removed; `d, err := ...` already declares err.
func evaluateDelta(y Element, additions []Element, deletions []Element, coefficients []Coefficient) (*Delta, error) {
	// dD(y) = ∏ 1..m (yD_i - y)
	d, err := dad(deletions, y)
	if err != nil {
		return nil, fmt.Errorf("dad fails on deletions")
	}
	// If no inverse exists, y itself was deleted and the witness is void.
	d, err = d.Invert()
	if err != nil {
		return nil, fmt.Errorf("no inverse exists")
	}
	// dA(y) = ∏ 1..n (yA_i - y)
	a, err := dad(additions, y)
	if err != nil {
		return nil, fmt.Errorf("dad fails on additions")
	}
	// dA(y)/dD(y)
	a = a.Mul(d)
	// Treat the coefficients as a polynomial in y.
	p := make(polynomialPoint, len(coefficients))
	for i := 0; i < len(coefficients); i++ {
		p[i] = coefficients[i]
	}
	// <Gamma_y, Omega>
	v, err := p.evaluate(y)
	if err != nil {
		return nil, fmt.Errorf("p.evaluate fails")
	}
	// 1/dD(y) * <Gamma_y, Omega>
	v = v.Mul(d)
	return &Delta{d: a, p: v}, nil
}

View File

@ -0,0 +1,229 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// Test_Membership_Witness_New checks that witness creation against a fresh
// accumulator succeeds and populates both the witness point c and scalar y.
func Test_Membership_Witness_New(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, _ := new(SecretKey).New(curve, seed[:])
	acc, _ := new(Accumulator).New(curve)
	e := curve.Scalar.New(2)
	mw, err := new(MembershipWitness).New(e, acc, key)
	require.NoError(t, err)
	require.NotNil(t, mw.c)
	require.NotNil(t, mw.y)
}
// Test_Membership_Witness_Marshal round-trips a witness through
// MarshalBinary/UnmarshalBinary and checks c and y survive unchanged.
func Test_Membership_Witness_Marshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	mw := &MembershipWitness{
		curve.PointG1.Generator().Mul(curve.Scalar.New(10)),
		curve.Scalar.New(15),
	}
	data, err := mw.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, data)
	newMW := &MembershipWitness{}
	err = newMW.UnmarshalBinary(data)
	require.NoError(t, err)
	require.True(t, mw.c.Equal(newMW.c))
	require.Equal(t, 0, mw.y.Cmp(newMW.y))
}
// Test_Membership builds an accumulator over seven elements, verifies a
// valid witness for one of them, and checks that a forged witness and a
// forged accumulator both fail verification.
func Test_Membership(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// nm_witness_max works as well if set to value larger than 0 for this test.
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	require.False(t, acc.value.IsIdentity())
	require.True(t, acc.value.IsOnCurve())
	require.NotEqual(t, acc.value, curve.NewG1GeneratorPoint())
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	err = wit.Verify(pk, acc)
	require.NoError(t, err)
	// Test wrong cases, forge a wrong witness
	wrongWit := MembershipWitness{
		curve.PointG1.Identity(),
		curve.Scalar.One(),
	}
	err = wrongWit.Verify(pk, acc)
	require.Error(t, err)
	// Test wrong cases, forge a wrong accumulator
	wrongAcc := &Accumulator{
		curve.PointG1.Generator(),
	}
	err = wit.Verify(pk, wrongAcc)
	require.Error(t, err)
}
// Test_Membership_Batch_Update checks that a witness updated via BatchUpdate
// (with the coefficients from Accumulator.Update) still verifies against the
// updated accumulator.
func Test_Membership_Batch_Update(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// nm_witness_max works as well if set to value larger than 0 for this test.
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	additions := data[0:2]
	deletions := data[2:5]
	// Update the accumulator; the returned coefficients drive the witness update.
	_, coefficients, err := acc.Update(sk, additions, deletions)
	require.NoError(t, err)
	require.NotNil(t, coefficients)
	_, err = wit.BatchUpdate(additions, deletions, coefficients)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
}
// Test_Membership_Multi_Batch_Update applies three separate accumulator
// updates (epochs), then folds all three coefficient sets into the witness in
// one MultiBatchUpdate call and checks it still verifies.
func Test_Membership_Multi_Batch_Update(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	element8 := curve.Scalar.Hash([]byte("10"))
	element9 := curve.Scalar.Hash([]byte("11"))
	element10 := curve.Scalar.Hash([]byte("12"))
	element11 := curve.Scalar.Hash([]byte("13"))
	element12 := curve.Scalar.Hash([]byte("14"))
	element13 := curve.Scalar.Hash([]byte("15"))
	element14 := curve.Scalar.Hash([]byte("16"))
	element15 := curve.Scalar.Hash([]byte("17"))
	element16 := curve.Scalar.Hash([]byte("18"))
	element17 := curve.Scalar.Hash([]byte("19"))
	element18 := curve.Scalar.Hash([]byte("20"))
	elements := []Element{
		element1,
		element2,
		element3,
		element4,
		element5,
		element6,
		element7,
		element8,
		element9,
		element10,
		element11,
		element12,
		element13,
		element14,
		element15,
		element16,
		element17,
		element18,
	}
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	// Epoch 1: two additions, three deletions.
	adds1 := data[0:2]
	dels1 := data[2:5]
	_, coeffs1, err := acc.Update(sk, adds1, dels1)
	require.NoError(t, err)
	require.NotNil(t, coeffs1)
	// Epoch 2: deletions only.
	dels2 := elements[8:10]
	_, coeffs2, err := acc.Update(sk, []Element{}, dels2)
	require.NoError(t, err)
	require.NotNil(t, coeffs2)
	// Epoch 3: deletions only.
	dels3 := elements[11:14]
	_, coeffs3, err := acc.Update(sk, []Element{}, dels3)
	require.NoError(t, err)
	require.NotNil(t, coeffs3)
	a := make([][]Element, 3)
	a[0] = adds1
	a[1] = []Element{}
	a[2] = []Element{}
	d := make([][]Element, 3)
	d[0] = dels1
	d[1] = dels2
	d[2] = dels3
	c := make([][]Coefficient, 3)
	c[0] = coeffs1
	c[1] = coeffs2
	c[2] = coeffs3
	_, err = wit.MultiBatchUpdate(a, d, c)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
}

66
pkg/crypto/bip32/bip32.go Normal file
View File

@ -0,0 +1,66 @@
package bip32
import (
"crypto/hmac"
"crypto/sha512"
"encoding/binary"
"errors"
"math/big"
"github.com/btcsuite/btcd/btcec/v2"
)
// ComputePublicKey computes the public key of a child key given the extended public key, chain code, and index.
//
// NOTE(review): several things here look off versus BIP32 and should be
// confirmed against the callers before this is relied on:
//   - The HMAC key is []byte{byte(chainCode)} — only the low 8 bits of the
//     uint32 chain code. BIP32 keys the HMAC with a full 32-byte chain code.
//   - The "hardened" guard tests the chain code's high bit together with a
//     negative index; hardened derivation is conventionally signalled by
//     index >= 0x80000000.
//   - The derived point is not checked against the point at infinity (the
//     rare invalid-child case in the BIP32 spec).
func ComputePublicKey(extPubKey []byte, chainCode uint32, index int) ([]byte, error) {
	// Check if the index is a hardened child key
	if chainCode&0x80000000 != 0 && index < 0 {
		return nil, errors.New("invalid index")
	}
	// Parse the parent public key and normalize to compressed serialization.
	pubKey, err := btcec.ParsePubKey(extPubKey)
	if err != nil {
		return nil, err
	}
	pubKeyBytes := pubKey.SerializeCompressed()
	// Serialize the index as a big-endian uint32.
	indexBytes := make([]byte, 4)
	binary.BigEndian.PutUint32(indexBytes, uint32(index))
	// I = HMAC-SHA512(key, serP(parent) || ser32(index))
	mac := hmac.New(sha512.New, []byte{byte(chainCode)})
	mac.Write(pubKeyBytes)
	mac.Write(indexBytes)
	I := mac.Sum(nil)
	// IL is the left 32 bytes of I, interpreted as a scalar.
	IL := I[:32]
	ilNum := new(big.Int).SetBytes(IL)
	// Per BIP32, reject the child when parse256(IL) >= n (the curve order).
	curve := btcec.S256()
	if ilNum.Cmp(curve.N) >= 0 {
		return nil, errors.New("invalid child key")
	}
	// Child point = point(IL) + parent public key point.
	ilx, ily := curve.ScalarBaseMult(IL)
	childX, childY := curve.Add(ilx, ily, pubKey.X(), pubKey.Y())
	lx := newBigIntFieldVal(childX)
	ly := newBigIntFieldVal(childY)
	// Serialize the child public key in compressed form.
	childPubKey := btcec.NewPublicKey(lx, ly)
	childPubKeyBytes := childPubKey.SerializeCompressed()
	return childPubKeyBytes, nil
}
// newBigIntFieldVal converts a big integer into a btcec field value.
func newBigIntFieldVal(val *big.Int) *btcec.FieldVal {
	fv := new(btcec.FieldVal)
	fv.SetByteSlice(val.Bytes())
	return fv
}

View File

@ -0,0 +1,57 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package bulletproof
import (
"github.com/pkg/errors"
"golang.org/x/crypto/sha3"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// generators contains a list of points to be used as generators for bulletproofs.
type generators []curves.Point

// ippGenerators holds the generator vectors necessary for an Inner Product
// Proof: the generated points split in half into the G and H lists.
// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
type ippGenerators struct {
	G generators // first half of the generated points
	H generators // second half of the generated points
}
// getGeneratorPoints generates generators using HashToCurve with Shake256(domain) as input.
// lenVector is the length of the scalars used for the Inner Product Proof.
// It returns 2*lenVector points split evenly into the G and H lists per the
// IPP specification.
// NOTE(review): the original comment promised 2*lenVector+1 points including
// a u generator, but no u point is produced here — callers appear to supply
// u separately; confirm.
// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
func getGeneratorPoints(lenVector int, domain []byte, curve curves.Curve) (*ippGenerators, error) {
	// Seed Shake256 with the domain separator so distinct domains yield
	// independent generator sets.
	shake := sha3.NewShake256()
	_, err := shake.Write(domain)
	if err != nil {
		return nil, errors.Wrap(err, "getGeneratorPoints shake.Write")
	}
	numPoints := lenVector * 2
	points := make([]curves.Point, numPoints)
	for i := 0; i < numPoints; i++ {
		// Draw 64 uniform bytes per point and hash-to-curve.
		bytes := [64]byte{}
		_, err := shake.Read(bytes[:])
		if err != nil {
			return nil, errors.Wrap(err, "getGeneratorPoints shake.Read")
		}
		nextPoint := curve.Point.Hash(bytes[:])
		points[i] = nextPoint
	}
	// Get G and H by splitting points in half
	G, H, err := splitPointVector(points)
	if err != nil {
		return nil, errors.Wrap(err, "getGeneratorPoints splitPointVector")
	}
	out := ippGenerators{G: G, H: H}
	return &out, nil
}

View File

@ -0,0 +1,61 @@
package bulletproof
import (
"testing"
"github.com/stretchr/testify/require"
"golang.org/x/crypto/sha3"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// TestGeneratorsHappyPath verifies generator creation yields the requested
// number of distinct G and H points.
// Fix: require.NoError now runs before gs is dereferenced; previously a
// generation error would nil-dereference gs instead of failing the test cleanly.
func TestGeneratorsHappyPath(t *testing.T) {
	curve := curves.ED25519()
	gs, err := getGeneratorPoints(10, []byte("test"), *curve)
	require.NoError(t, err)
	gsConcatenated := concatIPPGenerators(*gs)
	require.Len(t, gs.G, 10)
	require.Len(t, gs.H, 10)
	require.True(t, noDuplicates(gsConcatenated))
}
// TestGeneratorsUniquePerDomain verifies that different domain separators
// produce disjoint generator sets.
// Fix: the error checks now run before gs1/gs2 are dereferenced; previously a
// generation error would nil-dereference instead of failing the test cleanly.
func TestGeneratorsUniquePerDomain(t *testing.T) {
	curve := curves.ED25519()
	gs1, err := getGeneratorPoints(10, []byte("test"), *curve)
	require.NoError(t, err)
	gs1Concatenated := concatIPPGenerators(*gs1)
	gs2, err := getGeneratorPoints(10, []byte("test2"), *curve)
	require.NoError(t, err)
	gs2Concatenated := concatIPPGenerators(*gs2)
	require.True(t, areDisjoint(gs1Concatenated, gs2Concatenated))
}
func noDuplicates(gs generators) bool {
seen := map[[32]byte]bool{}
for _, G := range gs {
value := sha3.Sum256(G.ToAffineCompressed())
if seen[value] {
return false
}
seen[value] = true
}
return true
}
// areDisjoint reports whether no generator appears in both gs1 and gs2.
func areDisjoint(gs1, gs2 generators) bool {
	for _, a := range gs1 {
		for _, b := range gs2 {
			if a.Equal(b) {
				return false
			}
		}
	}
	return true
}
// concatIPPGenerators flattens an ippGenerators pair into one list: G then H.
func concatIPPGenerators(ippGens ippGenerators) generators {
	all := make(generators, 0, len(ippGens.G)+len(ippGens.H))
	all = append(all, ippGens.G...)
	all = append(all, ippGens.H...)
	return all
}

181
pkg/crypto/bulletproof/helpers.go Executable file
View File

@ -0,0 +1,181 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package bulletproof
import (
"github.com/pkg/errors"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// innerProduct computes the dot product <a, b> of two equal-length, non-empty
// scalar vectors, returning a single scalar.
func innerProduct(a, b []curves.Scalar) (curves.Scalar, error) {
	switch {
	case len(a) != len(b):
		return nil, errors.New("length of scalar vectors must be the same")
	case len(a) < 1:
		return nil, errors.New("length of vectors must be at least one")
	}
	// Accumulate sum += a[i]*b[i], starting from zero of the same curve.
	sum := a[0].Zero()
	for i := range a {
		sum = a[i].MulAdd(b[i], sum)
	}
	return sum, nil
}
// splitPointVector splits a point vector into two equal halves.
// The input must be non-empty and of even length.
func splitPointVector(points []curves.Point) ([]curves.Point, []curves.Point, error) {
	if len(points) < 1 {
		return nil, nil, errors.New("length of points must be at least one")
	}
	if len(points)%2 != 0 {
		return nil, nil, errors.New("length of points must be even")
	}
	half := len(points) / 2
	return points[:half], points[half:], nil
}
// splitScalarVector splits a scalar vector into two equal halves.
// The input must be non-empty and of even length.
func splitScalarVector(scalars []curves.Scalar) ([]curves.Scalar, []curves.Scalar, error) {
	if len(scalars) < 1 {
		return nil, nil, errors.New("length of scalars must be at least one")
	}
	if len(scalars)%2 != 0 {
		return nil, nil, errors.New("length of scalars must be even")
	}
	half := len(scalars) / 2
	return scalars[:half], scalars[half:], nil
}
// multiplyScalarToPointVector scales every point in g by the scalar x.
func multiplyScalarToPointVector(x curves.Scalar, g []curves.Point) []curves.Point {
	out := make([]curves.Point, len(g))
	for i := range g {
		out[i] = g[i].Mul(x)
	}
	return out
}
// multiplyScalarToScalarVector multiplies every scalar in a by the scalar x.
func multiplyScalarToScalarVector(x curves.Scalar, a []curves.Scalar) []curves.Scalar {
	out := make([]curves.Scalar, len(a))
	for i := range a {
		out[i] = a[i].Mul(x)
	}
	return out
}
// multiplyPairwisePointVectors combines g and h element-wise. In the additive
// notation of the curve API the group operation is Add, which corresponds to
// the pairwise "multiplication" of the bulletproofs paper.
func multiplyPairwisePointVectors(g, h []curves.Point) ([]curves.Point, error) {
	if len(g) != len(h) {
		return nil, errors.New("length of point vectors must be the same")
	}
	out := make([]curves.Point, len(g))
	for i := range g {
		out[i] = g[i].Add(h[i])
	}
	return out, nil
}
// multiplyPairwiseScalarVectors multiplies a and b element-wise, returning
// the vector of products.
func multiplyPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of point vectors must be the same")
	}
	out := make([]curves.Scalar, len(a))
	for i := range a {
		out[i] = a[i].Mul(b[i])
	}
	return out, nil
}
// addPairwiseScalarVectors adds a and b element-wise, returning the vector of
// sums.
func addPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	out := make([]curves.Scalar, len(a))
	for i := range a {
		out[i] = a[i].Add(b[i])
	}
	return out, nil
}
// subtractPairwiseScalarVectors subtracts b from a element-wise, returning
// the vector of differences.
func subtractPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	out := make([]curves.Scalar, len(a))
	for i := range a {
		out[i] = a[i].Sub(b[i])
	}
	return out, nil
}
// invertScalars returns a new vector holding the multiplicative inverse of
// every scalar in xs. Fails if any element is not invertible.
func invertScalars(xs []curves.Scalar) ([]curves.Scalar, error) {
	out := make([]curves.Scalar, len(xs))
	for i := range xs {
		inv, err := xs[i].Invert()
		if err != nil {
			return nil, errors.Wrap(err, "bulletproof helpers invertx")
		}
		out[i] = inv
	}
	return out, nil
}
// isPowerOfTwo reports whether i is a positive power of two (1, 2, 4, ...).
// Fix: the previous form (i&(i-1) == 0 alone) incorrectly accepted 0 and
// negative values such as the minimum int; the i > 0 guard closes that hole.
// Callers that gate on !isPowerOfTwo still reject those inputs, just with
// the length error instead of a later failure.
func isPowerOfTwo(i int) bool {
	return i > 0 && i&(i-1) == 0
}
// get2nVector returns the scalar vector [1, 2, 4, ..., 2^(length-1)].
// See the k^n and 2^n definitions on pg 12 of https://eprint.iacr.org/2017/1066.pdf
func get2nVector(length int, curve curves.Curve) []curves.Scalar {
	vec := make([]curves.Scalar, length)
	vec[0] = curve.Scalar.One()
	for i := 1; i < length; i++ {
		// Each entry doubles its predecessor.
		vec[i] = vec[i-1].Double()
	}
	return vec
}
// get1nVector returns a scalar vector of the given length with every entry
// equal to one.
func get1nVector(length int, curve curves.Curve) []curves.Scalar {
	ones := make([]curves.Scalar, length)
	for i := range ones {
		ones[i] = curve.Scalar.One()
	}
	return ones
}
// getknVector returns the scalar vector [1, k, k^2, ..., k^(length-1)].
// See the k^n definition on pg 12 of https://eprint.iacr.org/2017/1066.pdf
//
// Fix: the previous version assigned vectorkn[1] = k unconditionally, which
// panicked for length < 2. Computing each entry from its predecessor handles
// every non-negative length (an empty vector for length == 0) and produces
// identical results for length >= 2.
func getknVector(k curves.Scalar, length int, curve curves.Curve) []curves.Scalar {
	vectorkn := make([]curves.Scalar, length)
	if length == 0 {
		return vectorkn
	}
	vectorkn[0] = curve.Scalar.One()
	for i := 1; i < length; i++ {
		vectorkn[i] = vectorkn[i-1].Mul(k)
	}
	return vectorkn
}

View File

@ -0,0 +1,85 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// TestInnerProductHappyPath checks that two equal-length non-empty scalar
// vectors are accepted without error.
func TestInnerProductHappyPath(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(3, *curve)
	b := randScalarVec(3, *curve)
	_, err := innerProduct(a, b)
	require.NoError(t, err)
}
// TestInnerProductMismatchedLengths checks that vectors of different lengths
// are rejected.
func TestInnerProductMismatchedLengths(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(3, *curve)
	b := randScalarVec(4, *curve)
	_, err := innerProduct(a, b)
	require.Error(t, err)
}
// TestInnerProductEmptyVector checks that empty vectors are rejected.
func TestInnerProductEmptyVector(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(0, *curve)
	b := randScalarVec(0, *curve)
	_, err := innerProduct(a, b)
	require.Error(t, err)
}
// TestInnerProductOut cross-checks innerProduct against a manual expansion of
// the two-element dot product.
func TestInnerProductOut(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(2, *curve)
	b := randScalarVec(2, *curve)
	c, err := innerProduct(a, b)
	require.NoError(t, err)
	// Calculate manually a0*b0 + a1*b1
	cPrime := a[0].Mul(b[0]).Add(a[1].Mul(b[1]))
	require.Equal(t, c, cPrime)
}
// TestSplitListofPointsHappyPath checks that an even-length point vector is
// split into two equal halves.
func TestSplitListofPointsHappyPath(t *testing.T) {
	curve := curves.ED25519()
	points := randPointVec(10, *curve)
	firstHalf, secondHalf, err := splitPointVector(points)
	require.NoError(t, err)
	require.Len(t, firstHalf, 5)
	require.Len(t, secondHalf, 5)
}
// TestSplitListofPointsOddLength checks that odd-length vectors are rejected.
func TestSplitListofPointsOddLength(t *testing.T) {
	curve := curves.ED25519()
	points := randPointVec(11, *curve)
	_, _, err := splitPointVector(points)
	require.Error(t, err)
}
// TestSplitListofPointsZeroLength checks that empty vectors are rejected.
func TestSplitListofPointsZeroLength(t *testing.T) {
	curve := curves.ED25519()
	points := randPointVec(0, *curve)
	_, _, err := splitPointVector(points)
	require.Error(t, err)
}
// randScalarVec returns length random scalars on the given curve.
func randScalarVec(length int, curve curves.Curve) []curves.Scalar {
	vec := make([]curves.Scalar, length)
	for i := range vec {
		vec[i] = curve.Scalar.Random(crand.Reader)
	}
	return vec
}
// randPointVec returns length random points on the given curve.
func randPointVec(length int, curve curves.Curve) []curves.Point {
	vec := make([]curves.Point, length)
	for i := range vec {
		vec[i] = curve.Point.Random(crand.Reader)
	}
	return vec
}

View File

@ -0,0 +1,396 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package bulletproof implements the zero knowledge protocol bulletproofs as defined in https://eprint.iacr.org/2017/1066.pdf
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// InnerProductProver is the struct used to create InnerProductProofs
// It specifies which curve to use and holds precomputed generators
// See NewInnerProductProver() for prover initialization.
type InnerProductProver struct {
	curve      curves.Curve  // curve the proofs are constructed over
	generators ippGenerators // precomputed G and H generator vectors
}
// InnerProductProof contains necessary output for the inner product proof
// a and b are the final input vectors of scalars, they should be of length 1
// Ls and Rs are calculated per recursion of the IPP and are necessary for verification
// See section 3.1 on pg 15 of https://eprint.iacr.org/2017/1066.pdf
type InnerProductProof struct {
	a, b         curves.Scalar  // final collapsed scalars (length-1 vectors)
	capLs, capRs []curves.Point // per-recursion L and R commitments
	curve        *curves.Curve  // curve the proof was constructed over
}
// ippRecursion is the same as IPP but tracks recursive a', b', g', h' and Ls and Rs
// It should only be used internally by InnerProductProver.Prove()
// See L35 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
type ippRecursion struct {
	a, b         []curves.Scalar    // current scalar vectors, halved each round
	c            curves.Scalar      // inner product <a, b>
	capLs, capRs []curves.Point     // L and R commitments accumulated so far
	g, h         []curves.Point     // current generator vectors, halved each round
	u, capP      curves.Point       // blinding generator and current commitment P
	transcript   *merlin.Transcript // Fiat-Shamir transcript
}
// NewInnerProductProver initializes a new prover for vectors of length at
// most maxVectorLength, deriving its generators from the given domain.
// A prover is defined by an explicit curve.
func NewInnerProductProver(maxVectorLength int, domain []byte, curve curves.Curve) (*InnerProductProver, error) {
	gens, err := getGeneratorPoints(maxVectorLength, domain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getGenerators")
	}
	prover := &InnerProductProver{
		curve:      curve,
		generators: *gens,
	}
	return prover, nil
}
// NewInnerProductProof initializes an empty InnerProductProof for a specified
// curve. This should be used in tandem with UnmarshalBinary() to convert a
// marshaled proof into the struct.
func NewInnerProductProof(curve *curves.Curve) *InnerProductProof {
	return &InnerProductProof{
		a:     curve.NewScalar(),
		b:     curve.NewScalar(),
		capLs: nil,
		capRs: nil,
		curve: curve,
	}
}
// rangeToIPP takes the output of a range proof and converts it into an inner product proof
// See section 4.2 on pg 20
// The conversion specifies generators to use (g and hPrime), as well as the two vectors l, r of which the inner product is tHat
// Additionally, note that the P used for the IPP is in fact P*h^-mu from the range proof.
func (prover *InnerProductProver) rangeToIPP(proofG, proofH []curves.Point, l, r []curves.Scalar, tHat curves.Scalar, capPhmuinv, u curves.Point, transcript *merlin.Transcript) (*InnerProductProof, error) {
	// Note that P as a witness is only g^l * h^r
	// P needs to be in the form of g^l * h^r * u^<l,r>
	// Calculate the final P including the u^<l,r> term
	utHat := u.Mul(tHat)
	capP := capPhmuinv.Add(utHat)
	// Seed the recursion with the full l/r vectors and empty L/R lists.
	recursionParams := &ippRecursion{
		a:          l,
		b:          r,
		capLs:      []curves.Point{},
		capRs:      []curves.Point{},
		c:          tHat,
		g:          proofG,
		h:          proofH,
		capP:       capP,
		u:          u,
		transcript: transcript,
	}
	return prover.proveRecursive(recursionParams)
}
// getP returns the initial P value given two scalars a,b and point u
// This method should only be used for testing
// See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf
//
// NOTE(review): unlike Prove, this method does not check
// len(a) <= len(prover.generators.G) before slicing, so an oversized input
// would panic rather than return an error. Acceptable for a test-only
// helper, but worth confirming.
func (prover *InnerProductProver) getP(a, b []curves.Scalar, u curves.Point) (curves.Point, error) {
	// Vectors must have length power of two
	if !isPowerOfTwo(len(a)) {
		return nil, errors.New("ipp vector length must be power of two")
	}
	// Generator vectors must be same length
	if len(prover.generators.G) != len(prover.generators.H) {
		return nil, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Inner product requires len(a) == len(b) else error is returned
	c, err := innerProduct(a, b)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getInnerProduct")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:len(a)]
	proofH := prover.generators.H[0:len(b)]
	// initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf)
	ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a)
	hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b)
	uadotb := u.Mul(c)
	capP := ga.Add(hb).Add(uadotb)
	return capP, nil
}
// Prove executes the prover protocol on pg 16 of https://eprint.iacr.org/2017/1066.pdf
// It generates an inner product proof for vectors a and b, using u to blind the inner product in P
// A transcript is used for the Fiat Shamir heuristic.
func (prover *InnerProductProver) Prove(a, b []curves.Scalar, u curves.Point, transcript *merlin.Transcript) (*InnerProductProof, error) {
	// Vectors must have length power of two
	if !isPowerOfTwo(len(a)) {
		return nil, errors.New("ipp vector length must be power of two")
	}
	// Generator vectors must be same length
	if len(prover.generators.G) != len(prover.generators.H) {
		return nil, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Inner product requires len(a) == len(b) else error is returned
	c, err := innerProduct(a, b)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getInnerProduct")
	}
	// Length of vectors must be less than the number of generators generated
	if len(a) > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:len(a)]
	proofH := prover.generators.H[0:len(b)]
	// initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf)
	ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a)
	hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b)
	uadotb := u.Mul(c)
	capP := ga.Add(hb).Add(uadotb)
	// Seed the recursion with the full vectors and empty L/R lists.
	recursionParams := &ippRecursion{
		a:          a,
		b:          b,
		capLs:      []curves.Point{},
		capRs:      []curves.Point{},
		c:          c,
		g:          proofG,
		h:          proofH,
		capP:       capP,
		u:          u,
		transcript: transcript,
	}
	return prover.proveRecursive(recursionParams)
}
// proveRecursive executes the recursion on pg 16 of https://eprint.iacr.org/2017/1066.pdf
// Each round halves the vectors a, b (and the generators g, h), accumulating
// one (L, R) point pair per round, until length-1 vectors yield the final
// proof. The transcript carries Fiat-Shamir state across rounds, so the order
// of capL/capR appends is significant.
func (prover *InnerProductProver) proveRecursive(recursionParams *ippRecursion) (*InnerProductProof, error) {
	// length checks: a, b, g, h must all share the same length at every level
	if len(recursionParams.a) != len(recursionParams.b) {
		return nil, errors.New("ipp proveRecursive a and b different lengths")
	}
	if len(recursionParams.g) != len(recursionParams.h) {
		return nil, errors.New("ipp proveRecursive g and h different lengths")
	}
	if len(recursionParams.a) != len(recursionParams.g) {
		return nil, errors.New("ipp proveRecursive scalar and point vectors different lengths")
	}
	// Base case (L14, pg16 of https://eprint.iacr.org/2017/1066.pdf):
	// the single remaining scalars plus the accumulated L/R pairs form the proof.
	if len(recursionParams.a) == 1 {
		proof := &InnerProductProof{
			a:     recursionParams.a[0],
			b:     recursionParams.b[0],
			capLs: recursionParams.capLs,
			capRs: recursionParams.capRs,
			curve: &prover.curve,
		}
		return proof, nil
	}
	// Split current state into low (first half) vs high (second half) vectors
	aLo, aHi, err := splitScalarVector(recursionParams.a)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitScalarVector")
	}
	bLo, bHi, err := splitScalarVector(recursionParams.b)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitScalarVector")
	}
	gLo, gHi, err := splitPointVector(recursionParams.g)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitPointVector")
	}
	hLo, hHi, err := splitPointVector(recursionParams.h)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitPointVector")
	}
	// c_l, c_r (L21,22, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	cL, err := innerProduct(aLo, bHi)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	cR, err := innerProduct(aHi, bLo)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	// L, R (L23,24, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	// L = gHi^aLo * hLo^bHi * u^cL ; R = gLo^aHi * hHi^bLo * u^cR
	lga := prover.curve.Point.SumOfProducts(gHi, aLo)
	lhb := prover.curve.Point.SumOfProducts(hLo, bHi)
	ucL := recursionParams.u.Mul(cL)
	capL := lga.Add(lhb).Add(ucL)
	rga := prover.curve.Point.SumOfProducts(gLo, aHi)
	rhb := prover.curve.Point.SumOfProducts(hHi, bLo)
	ucR := recursionParams.u.Mul(cR)
	capR := rga.Add(rhb).Add(ucR)
	// Add L,R for verifier to use to calculate final g, h
	newL := recursionParams.capLs
	newL = append(newL, capL)
	newR := recursionParams.capRs
	newR = append(newR, capR)
	// Get x from L, R for non-interactive (See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf)
	// Note this replaces the interactive model, i.e. L36-28 of pg16 of https://eprint.iacr.org/2017/1066.pdf
	x, err := prover.calcx(capL, capR, recursionParams.transcript)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams calcx")
	}
	// Calculate recursive inputs
	xInv, err := x.Invert()
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams x.Invert")
	}
	// g', h' (L29,30, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	// g' = gLo^(1/x) ∘ gHi^x ; h' = hLo^x ∘ hHi^(1/x) (pairwise products)
	gLoxInverse := multiplyScalarToPointVector(xInv, gLo)
	gHix := multiplyScalarToPointVector(x, gHi)
	gPrime, err := multiplyPairwisePointVectors(gLoxInverse, gHix)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors")
	}
	hLox := multiplyScalarToPointVector(x, hLo)
	hHixInv := multiplyScalarToPointVector(xInv, hHi)
	hPrime, err := multiplyPairwisePointVectors(hLox, hHixInv)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors")
	}
	// P' (L31, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	// P' = L^(x^2) * P * R^(x^-2)
	xSquare := x.Square()
	xInvSquare := xInv.Square()
	LxSquare := capL.Mul(xSquare)
	RxInvSquare := capR.Mul(xInvSquare)
	PPrime := LxSquare.Add(recursionParams.capP).Add(RxInvSquare)
	// a', b' (L33, 34, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	// a' = aLo*x + aHi*(1/x) ; b' = bLo*(1/x) + bHi*x (elementwise)
	aLox := multiplyScalarToScalarVector(x, aLo)
	aHixIn := multiplyScalarToScalarVector(xInv, aHi)
	aPrime, err := addPairwiseScalarVectors(aLox, aHixIn)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors")
	}
	bLoxInv := multiplyScalarToScalarVector(xInv, bLo)
	bHix := multiplyScalarToScalarVector(x, bHi)
	bPrime, err := addPairwiseScalarVectors(bLoxInv, bHix)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors")
	}
	// c'
	cPrime, err := innerProduct(aPrime, bPrime)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	// Make recursive call (L35, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	recursiveIPP := &ippRecursion{
		a:          aPrime,
		b:          bPrime,
		capLs:      newL,
		capRs:      newR,
		c:          cPrime,
		g:          gPrime,
		h:          hPrime,
		capP:       PPrime,
		u:          recursionParams.u,
		transcript: recursionParams.transcript,
	}
	out, err := prover.proveRecursive(recursiveIPP)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams proveRecursive")
	}
	return out, nil
}
// calcx derives the Fiat-Shamir challenge scalar for one recursion round.
// The freshly computed capL and capR are appended to the current transcript
// state, and a 64-byte wide reduction of the transcript output yields the
// challenge. This replaces the interactive verifier
// (see section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf).
func (prover *InnerProductProver) calcx(capL, capR curves.Point, transcript *merlin.Transcript) (curves.Scalar, error) {
	// Commit this round's L and R values to the transcript.
	transcript.AppendMessage([]byte("addRecursiveL"), capL.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addRecursiveR"), capR.ToAffineUncompressed())
	// Draw 64 bytes from the transcript and reduce them into a scalar.
	challengeBytes := transcript.ExtractBytes([]byte("getx"), 64)
	challenge, err := prover.curve.NewScalar().SetBytesWide(challengeBytes)
	if err != nil {
		return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide")
	}
	return challenge, nil
}
// MarshalBinary serializes an inner product proof into bytes with layout
// a || b || L_0 || R_0 || L_1 || R_1 || ... using compressed points.
func (proof *InnerProductProof) MarshalBinary() []byte {
	out := append([]byte{}, proof.a.Bytes()...)
	out = append(out, proof.b.Bytes()...)
	// L and R values are interleaved pairwise in recursion order.
	for i := range proof.capLs {
		out = append(out, proof.capLs[i].ToAffineCompressed()...)
		out = append(out, proof.capRs[i].ToAffineCompressed()...)
	}
	return out
}
// UnmarshalBinary takes bytes of a marshaled proof and writes them into an inner product proof
// The inner product proof used should be from the output of NewInnerProductProof().
// Expected layout (see MarshalBinary): a || b || L_0 || R_0 || L_1 || R_1 || ...
// Returns an error (instead of panicking) when data is truncated or misaligned.
func (proof *InnerProductProof) UnmarshalBinary(data []byte) error {
	scalarLen := len(proof.curve.NewScalar().Bytes())
	pointLen := len(proof.curve.NewGeneratorPoint().ToAffineCompressed())
	// Validate the total length up front: two scalars followed by zero or more
	// (L, R) compressed-point pairs. Without these checks, malformed input
	// would panic on the slice expressions below instead of returning an error.
	if len(data) < 2*scalarLen {
		return errors.New("innerProductProof UnmarshalBinary insufficient data")
	}
	if (len(data)-2*scalarLen)%(2*pointLen) != 0 {
		return errors.New("innerProductProof UnmarshalBinary invalid data length")
	}
	ptr := 0
	// Get scalars
	a, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("innerProductProof UnmarshalBinary SetBytes")
	}
	proof.a = a
	ptr += scalarLen
	b, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("innerProductProof UnmarshalBinary SetBytes")
	}
	proof.b = b
	ptr += scalarLen
	// Get points
	var capLs, capRs []curves.Point //nolint:prealloc // pointer arithmetic makes it too unreadable.
	for ptr < len(data) {
		capLElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
		if err != nil {
			return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed")
		}
		capLs = append(capLs, capLElem)
		ptr += pointLen
		capRElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
		if err != nil {
			return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed")
		}
		capRs = append(capRs, capRElem)
		ptr += pointLen
	}
	proof.capLs = capLs
	proof.capRs = capRs
	return nil
}

View File

@ -0,0 +1,99 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"github.com/onsonr/sonr/pkg/crypto/core/curves"
)
// TestIPPHappyPath proves over two random length-8 vectors and checks that
// log2(8) = 3 rounds of (L, R) values were produced.
func TestIPPHappyPath(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	lhs := randScalarVec(8, *curve)
	rhs := randScalarVec(8, *curve)
	blind := curve.Point.Random(crand.Reader)
	proof, err := prover.Prove(lhs, rhs, blind, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.Equal(t, 3, len(proof.capLs))
	require.Equal(t, 3, len(proof.capRs))
}
// TestIPPMismatchedVectors verifies Prove rejects vectors of unequal length.
func TestIPPMismatchedVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	_, err = prover.Prove(
		randScalarVec(4, *curve),
		randScalarVec(8, *curve),
		curve.Point.Random(crand.Reader),
		merlin.NewTranscript("test"),
	)
	require.Error(t, err)
}
// TestIPPNonPowerOfTwoLengthVectors verifies Prove rejects vectors whose
// length is not a power of two (the recursion requires repeated halving).
func TestIPPNonPowerOfTwoLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	_, err = prover.Prove(
		randScalarVec(3, *curve),
		randScalarVec(3, *curve),
		curve.Point.Random(crand.Reader),
		merlin.NewTranscript("test"),
	)
	require.Error(t, err)
}
// TestIPPZeroLengthVectors verifies Prove rejects empty input vectors.
func TestIPPZeroLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	_, err = prover.Prove(
		randScalarVec(0, *curve),
		randScalarVec(0, *curve),
		curve.Point.Random(crand.Reader),
		merlin.NewTranscript("test"),
	)
	require.Error(t, err)
}
// TestIPPGreaterThanMaxLengthVectors verifies Prove rejects vectors longer
// than the prover's precomputed generator capacity (8 here, input is 16).
func TestIPPGreaterThanMaxLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	_, err = prover.Prove(
		randScalarVec(16, *curve),
		randScalarVec(16, *curve),
		curve.Point.Random(crand.Reader),
		merlin.NewTranscript("test"),
	)
	require.Error(t, err)
}
// TestIPPMarshal round-trips a proof through MarshalBinary/UnmarshalBinary
// and checks that every component survives unchanged.
func TestIPPMarshal(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	lhs := randScalarVec(8, *curve)
	rhs := randScalarVec(8, *curve)
	blind := curve.Point.Random(crand.Reader)
	original, err := prover.Prove(lhs, rhs, blind, merlin.NewTranscript("test"))
	require.NoError(t, err)

	decoded := NewInnerProductProof(curve)
	err = decoded.UnmarshalBinary(original.MarshalBinary())
	require.NoError(t, err)

	// Scalars a, b and every (L, R) pair must match the original exactly.
	require.Zero(t, original.a.Cmp(decoded.a))
	require.Zero(t, original.b.Cmp(decoded.b))
	for i := range original.capLs {
		require.True(t, original.capLs[i].Equal(decoded.capLs[i]))
		require.True(t, original.capRs[i].Equal(decoded.capRs[i]))
	}
}
// ---------------------------------------------------------------------------
// NOTE(review): end of concatenated test file; further files in the original
// diff were omitted by the extractor ("Some files were not shown").
// ---------------------------------------------------------------------------