(no commit message provided)

This commit is contained in:
Prad Nukala 2024-07-05 22:20:13 -04:00 committed by Prad Nukala (aider)
commit 5fd43dfd6b
457 changed files with 115535 additions and 0 deletions

72
.air.toml Normal file
View File

@ -0,0 +1,72 @@
# Air (live-reload for Go) configuration — rebuilds and restarts the vltd
# binary whenever watched files change.
root = "."
testdata_dir = "testdata"
tmp_dir = "tmp"
[build]
# Array of commands to run before each build
pre_cmd = ["task templ"]
# Just plain old shell command. You could use `make` as well.
cmd = "go build -o ./tmp/vltd ./cmd/vltd"
# Binary produced by `cmd` above; air runs and restarts this on change.
bin = "tmp/vltd"
# Watch these filename extensions.
include_ext = ["go", "proto", "templ"]
# Ignore these filename extensions or directories.
# NOTE(review): ".github" is listed here AND in include_dir below —
# confirm which is intended (air normally gives the exclusion precedence).
exclude_dir = ["api", "crypto", "pkl", "scripts", "tmp", ".github"]
# Watch these directories if you specified.
include_dir = ["pkg", "x", "internal", ".github"]
# Watch these files.
include_file = []
# Exclude files.
exclude_file = []
# Exclude specific regular expressions.
exclude_regex = ["_test\\.go"]
# Exclude unchanged files.
exclude_unchanged = true
# Follow symlink for directories
follow_symlink = true
# This log file places in your tmp_dir.
log = "air.log"
# Poll files for changes instead of using fsnotify.
poll = false
# Poll interval (defaults to the minimum interval of 500ms).
poll_interval = 500 # ms
# It's not necessary to trigger build each time file changes if it's too frequent.
delay = 0 # ms
# Stop running old binary when build errors occur.
stop_on_error = true
# Send Interrupt signal before killing process (windows does not support this feature)
send_interrupt = false
# Delay after sending Interrupt signal
# NOTE(review): air interprets an integer kill_delay as nanoseconds, so 500
# is effectively zero — consider the string form "500ms" if a real delay is
# wanted. Verify against the air config reference.
kill_delay = 500 # nanosecond
# Rerun binary or not
rerun = false
# Delay after each execution
rerun_delay = 500
[log]
# Show log time
time = false
# Only show main log (silences watcher, build, runner)
main_only = false
[color]
# Customize each part's color. If no color found, use the raw app log.
main = "magenta"
watcher = "cyan"
build = "yellow"
runner = "green"
[misc]
# Delete tmp directory on exit
clean_on_exit = true
[screen]
clear_on_rebuild = true
keep_scroll = true
# Enable live-reloading on the browser.
[proxy]
enabled = true
proxy_port = 8090
app_port = 8080

7
.cz.toml Normal file
View File

@ -0,0 +1,7 @@
[tool.commitizen]
name = "cz_conventional_commits"
tag_format = "$version"
version_scheme = "semver"
version = "0.6.0"
update_changelog_on_bump = true
major_version_zero = true

15
.devcontainer/Dockerfile Normal file
View File

@ -0,0 +1,15 @@
FROM jetpackio/devbox:latest
# Installing your devbox project
WORKDIR /code
USER root:root
RUN mkdir -p /code && chown ${DEVBOX_USER}:${DEVBOX_USER} /code
USER ${DEVBOX_USER}:${DEVBOX_USER}
COPY --chown=${DEVBOX_USER}:${DEVBOX_USER} devbox.json devbox.json
COPY --chown=${DEVBOX_USER}:${DEVBOX_USER} devbox.lock devbox.lock
RUN devbox run -- echo "Installed Packages."
RUN devbox shellenv --init-hook >> ~/.profile

View File

@ -0,0 +1,16 @@
{
"name": "Devbox Remote Container",
"build": {
"dockerfile": "./Dockerfile",
"context": ".."
},
"customizations": {
"vscode": {
"settings": {},
"extensions": [
"jetpack-io.devbox"
]
}
},
"remoteUser": "devbox"
}

10
.envrc Normal file
View File

@ -0,0 +1,10 @@
# Automatically sets up your devbox environment whenever you cd into this
# directory via our direnv integration:
#
# Enter the devbox shell environment
eval "$(devbox generate direnv --print-envrc)"
# check out https://www.jetpack.io/devbox/docs/ide_configuration/direnv/
# for more details
#

8
.github/testnet.md vendored Normal file
View File

@ -0,0 +1,8 @@
---
title: New Testnet {{ date | date('dddd MMMM Do') }}
labels: enhancement
---
Server IP: {{ env.SERVER_IPV4_ADDR }}
SSH: `ssh -o StrictHostKeyChecking=no root@{{ env.SERVER_IPV4_ADDR }}`
Tag: {{ env.GITHUB_REF_NAME }} / Commit: {{ env.GITHUB_SHA }}
Local-Interchain API: http://{{ env.SERVER_IPV4_ADDR }}:{{ env.LOCALIC_PORT }}

26
.github/workflows/auto-bump.yml vendored Normal file
View File

@ -0,0 +1,26 @@
name: Automatic Tag Bump
on:
pull_request:
types:
- closed
branches:
- main
jobs:
build:
if: github.event.pull_request.merged == true
runs-on: ubuntu-22.04
permissions:
contents: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.merge_commit_sha }}
fetch-depth: "0"
- name: Bump version and push tag
uses: anothrNick/github-tag-action@v1 # Don't use @master or @v1 unless you're happy to test the latest version
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # if you don't want to set write permissions use a PAT token
WITH_V: true
PRERELEASE: true

22
.github/workflows/buf-publish.yml vendored Normal file
View File

@ -0,0 +1,22 @@
name: Publish to buf.build/didao/sonr
on:
push:
permissions:
contents: write
issues: write
jobs:
buf_push:
runs-on: ubuntu-latest
steps:
# Run `git checkout`
- uses: actions/checkout@v2
# Install the `buf` CLI
- uses: bufbuild/buf-setup-action@v1
# Push only the Input in `proto` to the BSR
- uses: bufbuild/buf-push-action@v1
continue-on-error: true
with:
input: proto
buf_token: ${{ secrets.BUF_TOKEN }}

71
.github/workflows/docker-release.yml vendored Normal file
View File

@ -0,0 +1,71 @@
name: docker image release
# NOTE: For this action to work, you must enable write permissions in your github repository settings.
# Settings -> Actions -> General. "Workflow Permissions". Select "Read and write permissions".
# If you forget to enable, the action will fail on push with a 401 error. Just re-run the failed action after enabling.
on:
push:
tags:
- "v[0-9]+.[0-9]+.[0-9]+" # ignore rc
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
GO_VERSION: 1.22
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
release-image:
runs-on: ubuntu-latest
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
# all lowercase ghcr registry
- run: |
DOCKER_REGISTRY=`echo "${{ env.REGISTRY }}/${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]'`
echo "DOCKER_REGISTRY=$DOCKER_REGISTRY" >> $GITHUB_ENV
REPO_NAME=`echo "${{ github.repository }}" | awk -F'/' '{print $2}' | tr '[:upper:]' '[:lower:]'`
echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV
- name: Parse tag
id: tag
run: |
# v0.0.1
VERSION=$(echo ${{ github.ref_name }} | sed "s/v//")
# 0.0.1
echo "VERSION=$VERSION" >> $GITHUB_ENV
# build and publish package to ghcr (public) with codebase remaining private
- name: Log in to the Container registry
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# make sure to update package to be public in repo ghcr settings
- name: Build and push Docker image
uses: strangelove-ventures/heighliner-build-action@v1.0.0
with:
# v0.0.1
git-ref: ${{ github.ref_name }}
chain: ${{ env.REPO_NAME}}
dockerfile: cosmos
registry: ${{ env.DOCKER_REGISTRY }}
build-target: |
cd ..
make install
local: true
binaries: |
- /go/bin/sonrd
build-env: |
- BUILD_TAGS=muslc

43
.github/workflows/release-binary.yml vendored Normal file
View File

@ -0,0 +1,43 @@
name: "Release Binary"
# Pairs with .goreleaser.yaml file for configuration.
on:
  push:
    tags:
      - '**'

# Test Locally with:
# goreleaser build --skip=validate --snapshot --clean

jobs:
  goreleaser:
    permissions: write-all
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2.3.4
        with:
          fetch-depth: 0

      - uses: actions/setup-go@v2
        with:
          go-version: '1.21'

      - name: Clean up dist directory
        run: rm -rf dist

      - name: Build
        uses: goreleaser/goreleaser-action@v5
        with:
          version: latest
          # `--skip-validate` was deprecated in GoReleaser v1.19 and removed
          # in v2; with `version: latest` the old flag makes the job fail.
          # `--skip=validate` is the supported replacement.
          args: build --skip=validate

      - name: Release
        uses: goreleaser/goreleaser-action@v5
        if: startsWith(github.ref, 'refs/tags/')
        with:
          version: latest
          args: release --skip=validate --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

92
.github/workflows/testnet-hetzner.yml vendored Normal file
View File

@ -0,0 +1,92 @@
name: cloud testnet
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+' # ignore rc
# https://github.com/<org>/<repo>/settings/secrets/actions/new
# - HCLOUD_TOKEN
# - SSH_PRIVATE_KEY
env:
GO_VERSION: 1.21.0
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
LOCALIC_PORT: 8080
LOCALIC_AUTH_KEY: ""
HETZNER_SSH_KEY: "reece-hetzner"
# HETZNER_MACHINE_TYPE: "cpx31" # shared 4vCPU ($7/Mo)
HETZNER_MACHINE_TYPE: "ccx23" # dedicated 4 CPU 16GB Ram ($25/Mo)
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
launch-testnet:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: TimDaub/hetzner-cloud-deploy-server-action@v2
with:
# gh-actions-tn-v1.0.0
server-name: "gh-actions-tn-${{github.ref_name}}"
server-image: "ubuntu-22.04"
server-type: ${{ env.HETZNER_MACHINE_TYPE }}
ssh-key-name: ${{ env.HETZNER_SSH_KEY }}
delete-server: false
startup-timeout: 40000 # ms
hcloud-token: ${{ secrets.HCLOUD_TOKEN }}
- name: Set env variables
run: |
mkdir -p ~/.ssh/ && ssh-keyscan -H $SERVER_IPV4 >> ~/.ssh/known_hosts
echo "SERVER_IPV4_ADDR=$SERVER_IPV4" >> $GITHUB_ENV
echo "GITHUB_SHA=${{github.sha}}" >> $GITHUB_ENV
echo "GITHUB_REF_NAME=${{ github.ref_name }}" >> $GITHUB_ENV
echo "LOCALIC_PORT=${{ env.LOCALIC_PORT }}" >> $GITHUB_ENV
- uses: JasonEtco/create-an-issue@v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
filename: .github/testnet.md
update_existing: true
- name: Testnet setup
uses: appleboy/ssh-action@v1.0.3
with:
host: ${{ env.SERVER_IPV4_ADDR }}
username: root
key: ${{ secrets.SSH_PRIVATE_KEY }}
port: 22
script: |
sudo apt-get update
sudo DEBIAN_FRONTEND=noninteractive apt -y install make gcc jq bison ca-certificates curl
wget https://go.dev/dl/go1.22.1.linux-amd64.tar.gz
sudo rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go1.22.1.linux-amd64.tar.gz
echo "export PATH=$PATH:/usr/local/go/bin" >> ~/.bashrc
export PATH=$PATH:/usr/local/go/bin
bash # idk if I can use this or not
sudo DEBIAN_FRONTEND=noninteractive apt-get remove -y containerd.io
sudo DEBIAN_FRONTEND=noninteractive apt-get -y install docker.io docker-compose
sudo apt-get update
wget https://github.com/strangelove-ventures/interchaintest/releases/download/v8.2.0/local-ic && chmod +x local-ic
sudo mv local-ic /usr/local/bin
git clone https://github.com/strangelove-ventures/heighliner.git && cd heighliner
go build && chmod +x heighliner
sudo mv heighliner /usr/local/bin
cd ~/
git clone https://github.com/${{ github.repository }}.git chain && cd chain
git checkout ${{ github.ref_name }}
make local-image
sudo screen -S testnet -d -m local-ic start ibc-testnet --api-address=0.0.0.0 --api-port=${{ env.LOCALIC_PORT }} --auth-key=${{ env.LOCALIC_AUTH_KEY }}

View File

@ -0,0 +1,65 @@
name: self-hosted testnet
#
# Setup a runner on your own hardware or in a public cloud like GCP, Hetzner, etc.
# This is required if your chain is closed source but you want a dev/semi-public testnet
# - https://github.com/<org>/<repo>/settings/actions/runners/new?arch=x64&os=linux
#
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+' # ignore rc
env:
GO_VERSION: 1.21.0
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
LOCALIC_PORT: 8080
LOCALIC_AUTH_KEY: ""
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
launch-testnet:
runs-on: self-hosted
steps:
- uses: actions/checkout@v3
- name: Setup System
run: |
sudo apt-get update
sudo DEBIAN_FRONTEND=noninteractive apt -y install make gcc jq bison ca-certificates curl
wget https://go.dev/dl/go1.22.1.linux-amd64.tar.gz
sudo rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go1.22.1.linux-amd64.tar.gz
echo "export PATH=$PATH:/usr/local/go/bin" >> ~/.bashrc
export PATH=$PATH:/usr/local/go/bin
sudo DEBIAN_FRONTEND=noninteractive apt-get remove -y containerd.io || true
sudo DEBIAN_FRONTEND=noninteractive apt-get -y install docker.io docker-compose
sudo apt-get update
wget https://github.com/strangelove-ventures/interchaintest/releases/download/v8.2.0/local-ic && chmod +x local-ic
sudo mv local-ic /usr/local/bin
git clone https://github.com/strangelove-ventures/heighliner.git && cd heighliner
go build && chmod +x heighliner
sudo mv heighliner /usr/local/bin
cd .. && rm -rf heighliner
- name: Build + Run Testnet
run: |
killall local-ic || true
docker kill $(docker ps -q) || true
export PATH=$PATH:/usr/local/go/bin
make local-image
sudo screen -S testnet-${{ github.ref_name }} -d -m local-ic start ibc-testnet --api-address=0.0.0.0 --api-port=${{ env.LOCALIC_PORT }} --auth-key=${{ env.LOCALIC_AUTH_KEY }}
# Add other commands here you perform for setup once local-ic has started (poll on LOCALIC_PORT) such as contract upload.

26
.github/workflows/unit-test.yml vendored Normal file
View File

@ -0,0 +1,26 @@
name: Unit tests
on:
push:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
GO_VERSION: 1.21
jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Check out source
uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: ${{ env.GO_VERSION }}
check-latest: true
- name: Tests
run: make test

74
.gitignore vendored Normal file
View File

@ -0,0 +1,74 @@
# Binaries
*.exe
*.exe~
*.dll
*.so
*.dylib
*.app
.DS_Store
.session.vim
aof*
# Test binary
*.test
.devon*
**/.DS_Store
# Output of the go coverage tool
*.out
buf.lock
# Dependency directories
node_modules/
# Go workspace file
go.work
go.work.sum
# Environment files
.env
**/*.env
# Lock files
package-lock.json
devbox.lock
# Config files
pkg/config/static/
# Terraform
**/.terraform/*
.terraform
*.tfstate
*.tfstate.*
crash.log
crash.*.log
*.tfvars
*.tfvars.json
override.tf
override.tf.json
*_override.tf
*_override.tf.json
.terraformrc
terraform.rc
# Misc
.DS_Store
.tmp/
tmp/
**/*tmp*/
*.tmp
*.log
*.dot
*.pem
dist/
bin/
build/
.devbox
.ignore
.opencommitignore
heighliner*
sonr
deploy/**/data
x/.DS_Store
.aider*

111
.goreleaser.yaml Normal file
View File

@ -0,0 +1,111 @@
project_name: core
release:
github:
owner: di-dao
name: core
name_template: "{{.Tag}}"
# Only uncomment os, arch, and targets if you are NOT using cosmwasm / wasm-light-client.
# Windows, 386 (32bit), and ARM are not Wasm compatible.
builds:
- id: core
goos:
- linux
- darwin
# - windows
goarch:
- amd64
# - arm64
# - "386"
goarm:
- "6"
gomips:
- hardfloat
goamd64:
- v1
targets:
- linux_amd64_v1
# - darwin_amd64_v1
# - linux_arm64
# - linux_386
# - darwin_arm64
# - windows_amd64_v1
# - windows_arm64
# - windows_386
dir: .
main: ./cmd/sonrd
binary: sonrd
builder: go
gobinary: go
command: build
ldflags:
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} -X main.builtBy=goreleaser
archives:
- id: default
name_template: '{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ with .Arm }}v{{ . }}{{ end }}{{ with .Mips }}_{{ . }}{{ end }}{{ if not (eq .Amd64 "v1") }}{{ .Amd64 }}{{ end }}'
format: tar.gz
files:
- src: license*
- src: LICENSE*
- src: readme*
- src: README*
- src: changelog*
- src: CHANGELOG*
snapshot:
name_template: "{{ .Version }}-SNAPSHOT-{{ .ShortCommit }}"
checksum:
name_template: "{{ .ProjectName }}_{{ .Version }}_checksums.txt"
algorithm: sha256
dist: dist
env_files:
github_token: ~/.config/goreleaser/github_token
gitlab_token: ~/.config/goreleaser/gitlab_token
gitea_token: ~/.config/goreleaser/gitea_token
source:
name_template: "{{ .ProjectName }}-{{ .Version }}"
format: tar.gz
gomod:
gobinary: go
announce:
twitter:
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
mastodon:
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
server: ""
reddit:
title_template: "{{ .ProjectName }} {{ .Tag }} is out!"
url_template: "{{ .ReleaseURL }}"
slack:
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
username: GoReleaser
discord:
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
author: GoReleaser
color: "3888754"
icon_url: https://goreleaser.com/static/avatar.png
teams:
title_template: "{{ .ProjectName }} {{ .Tag }} is out!"
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
color: "#2D313E"
icon_url: https://goreleaser.com/static/avatar.png
smtp:
subject_template: "{{ .ProjectName }} {{ .Tag }} is out!"
body_template: "You can view details from: {{ .ReleaseURL }}"
mattermost:
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
title_template: "{{ .ProjectName }} {{ .Tag }} is out!"
username: GoReleaser
linkedin:
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
telegram:
message_template: "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"
webhook:
message_template: '{ "message": "{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}"}'
content_type: application/json; charset=utf-8
git:
tag_sort: -version:refname
github_urls:
download: https://github.com
gitlab_urls:
download: https://gitlab.com

0
CHANGELOG.md Normal file
View File

44
Dockerfile Normal file
View File

@ -0,0 +1,44 @@
FROM golang:1.21-alpine AS go-builder
SHELL ["/bin/sh", "-ecuxo", "pipefail"]
RUN apk add --no-cache ca-certificates build-base git
WORKDIR /code
COPY go.mod go.sum ./
RUN set -eux; \
export ARCH=$(uname -m); \
WASM_VERSION=$(go list -m all | grep github.com/CosmWasm/wasmvm || true); \
if [ ! -z "${WASM_VERSION}" ]; then \
WASMVM_REPO=$(echo $WASM_VERSION | awk '{print $1}');\
WASMVM_VERS=$(echo $WASM_VERSION | awk '{print $2}');\
wget -O /lib/libwasmvm_muslc.a https://${WASMVM_REPO}/releases/download/${WASMVM_VERS}/libwasmvm_muslc.$(uname -m).a;\
fi; \
go mod download;
# Copy over code
COPY . /code
# force it to use static lib (from above) not standard libgo_cosmwasm.so file
# then log output of file /code/bin/sonrd
# then ensure static linking
RUN LEDGER_ENABLED=false BUILD_TAGS=muslc LINK_STATICALLY=true make build \
&& file /code/build/sonrd \
&& echo "Ensuring binary is statically linked ..." \
&& (file /code/build/sonrd | grep "statically linked")
# --------------------------------------------------------
FROM alpine:3.16
COPY --from=go-builder /code/build/sonrd /usr/bin/sonrd
# Install dependencies used for Starship
RUN apk add --no-cache curl make bash jq sed
WORKDIR /opt
# rest server, tendermint p2p, tendermint rpc
EXPOSE 1317 26656 26657
CMD ["/usr/bin/sonrd", "version"]

295
Makefile Normal file
View File

@ -0,0 +1,295 @@
#!/usr/bin/make -f
PACKAGES_SIMTEST=$(shell go list ./... | grep '/simulation')
VERSION := $(shell echo $(shell git describe --tags) | sed 's/^v//')
COMMIT := $(shell git log -1 --format='%H')
LEDGER_ENABLED ?= true
SDK_PACK := $(shell go list -m github.com/cosmos/cosmos-sdk | sed 's/ /\@/g')
BINDIR ?= $(GOPATH)/bin
SIMAPP = ./app
# for dockerized protobuf tools
DOCKER := $(shell which docker)
HTTPS_GIT := github.com/onsonr/hway.git
export GO111MODULE = on
# process build tags
build_tags = netgo
ifeq ($(LEDGER_ENABLED),true)
ifeq ($(OS),Windows_NT)
GCCEXE = $(shell where gcc.exe 2> NUL)
ifeq ($(GCCEXE),)
$(error gcc.exe not installed for ledger support, please install or set LEDGER_ENABLED=false)
else
build_tags += ledger
endif
else
UNAME_S = $(shell uname -s)
ifeq ($(UNAME_S),OpenBSD)
$(warning OpenBSD detected, disabling ledger support (https://github.com/cosmos/cosmos-sdk/issues/1988))
else
GCC = $(shell command -v gcc 2> /dev/null)
ifeq ($(GCC),)
$(error gcc not installed for ledger support, please install or set LEDGER_ENABLED=false)
else
build_tags += ledger
endif
endif
endif
endif
ifeq ($(WITH_CLEVELDB),yes)
build_tags += gcc
endif
build_tags += $(BUILD_TAGS)
build_tags := $(strip $(build_tags))
whitespace :=
empty = $(whitespace) $(whitespace)
comma := ,
build_tags_comma_sep := $(subst $(empty),$(comma),$(build_tags))
# process linker flags
ldflags = -X github.com/cosmos/cosmos-sdk/version.Name=core \
-X github.com/cosmos/cosmos-sdk/version.AppName=sonrd \
-X github.com/cosmos/cosmos-sdk/version.Version=$(VERSION) \
-X github.com/cosmos/cosmos-sdk/version.Commit=$(COMMIT) \
-X "github.com/cosmos/cosmos-sdk/version.BuildTags=$(build_tags_comma_sep)"
ifeq ($(WITH_CLEVELDB),yes)
ldflags += -X github.com/cosmos/cosmos-sdk/types.DBBackend=cleveldb
endif
ifeq ($(LINK_STATICALLY),true)
ldflags += -linkmode=external -extldflags "-Wl,-z,muldefs -static"
endif
ldflags += $(LDFLAGS)
ldflags := $(strip $(ldflags))
BUILD_FLAGS := -tags "$(build_tags_comma_sep)" -ldflags '$(ldflags)' -trimpath
# The below include contains the tools and runsim targets.
include contrib/devtools/Makefile
all: install lint test
build: go.sum
ifeq ($(OS),Windows_NT)
$(error wasmd server not supported. Use "make build-windows-client" for client)
exit 1
else
go build -mod=readonly $(BUILD_FLAGS) -o build/sonrd ./cmd/sonrd
endif
build-windows-client: go.sum
GOOS=windows GOARCH=amd64 go build -mod=readonly $(BUILD_FLAGS) -o build/sonrd.exe ./cmd/sonrd
build-contract-tests-hooks:
ifeq ($(OS),Windows_NT)
go build -mod=readonly $(BUILD_FLAGS) -o build/contract_tests.exe ./cmd/contract_tests
else
go build -mod=readonly $(BUILD_FLAGS) -o build/contract_tests ./cmd/contract_tests
endif
install: go.sum
go install -mod=readonly $(BUILD_FLAGS) ./cmd/sonrd
########################################
### Tools & dependencies
go-mod-cache: go.sum
@echo "--> Download go modules to local cache"
@go mod download
go.sum: go.mod
@echo "--> Ensure dependencies have not been modified"
@go mod verify
draw-deps:
@# requires brew install graphviz or apt-get install graphviz
go install github.com/RobotsAndPencils/goviz@latest
@goviz -i ./cmd/sonrd -d 2 | dot -Tpng -o dependency-graph.png
clean:
rm -rf snapcraft-local.yaml build/
distclean: clean
rm -rf vendor/
########################################
### Testing
test: test-unit
test-all: test-race test-cover test-system
test-unit:
@VERSION=$(VERSION) go test -mod=readonly -tags='ledger test_ledger_mock' ./...
test-race:
@VERSION=$(VERSION) go test -mod=readonly -race -tags='ledger test_ledger_mock' ./...
test-cover:
@go test -mod=readonly -timeout 30m -race -coverprofile=coverage.txt -covermode=atomic -tags='ledger test_ledger_mock' ./...
benchmark:
@go test -mod=readonly -bench=. ./...
test-sim-import-export: runsim
@echo "Running application import/export simulation. This may take several minutes..."
@$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(SIMAPP) -ExitOnFail 50 5 TestAppImportExport
test-sim-multi-seed-short: runsim
@echo "Running short multi-seed application simulation. This may take awhile!"
@$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(SIMAPP) -ExitOnFail 50 5 TestFullAppSimulation
test-sim-deterministic: runsim
@echo "Running application deterministic simulation. This may take awhile!"
@$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(SIMAPP) -ExitOnFail 1 1 TestAppStateDeterminism
test-system: install
$(MAKE) -C tests/system/ test
###############################################################################
### Linting ###
###############################################################################
format-tools:
go install mvdan.cc/gofumpt@v0.4.0
go install github.com/client9/misspell/cmd/misspell@v0.3.4
go install github.com/daixiang0/gci@v0.11.2
lint: format-tools
golangci-lint run --tests=false
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "*_test.go" | xargs gofumpt -d
format: format-tools
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs gofumpt -w
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs misspell -w
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs gci write --skip-generated -s standard -s default -s "prefix(cosmossdk.io)" -s "prefix(github.com/cosmos/cosmos-sdk)" -s "prefix(github.com/CosmWasm/wasmd)" --custom-order
mod-tidy:
go mod tidy
cd interchaintest && go mod tidy
.PHONY: format-tools lint format mod-tidy
###############################################################################
### Protobuf ###
###############################################################################
protoVer=0.13.2
protoImageName=ghcr.io/cosmos/proto-builder:$(protoVer)
protoImage=$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace $(protoImageName)
proto-all: proto-format proto-lint proto-gen format
proto-gen:
@echo "Generating Protobuf files"
@go install cosmossdk.io/orm/cmd/protoc-gen-go-cosmos-orm@latest
@$(protoImage) sh ./scripts/protocgen.sh
# generate the stubs for the proto files from the proto directory
spawn stub-gen
proto-format:
@echo "Formatting Protobuf files"
@$(protoImage) find ./ -name "*.proto" -exec clang-format -i {} \;
proto-swagger-gen:
@./scripts/protoc-swagger-gen.sh
proto-lint:
@$(protoImage) buf lint --error-format=json
proto-check-breaking:
@$(protoImage) buf breaking --against $(HTTPS_GIT)#branch=main
.PHONY: all install install-debug \
go-mod-cache draw-deps clean build format \
test test-all test-build test-cover test-unit test-race \
test-sim-import-export build-windows-client \
test-system
## --- Testnet Utilities ---
get-localic:
@echo "Installing local-interchain"
git clone --branch v8.2.0 https://github.com/strangelove-ventures/interchaintest.git interchaintest-downloader
cd interchaintest-downloader/local-interchain && make install
@echo ✅ local-interchain installed $(shell which local-ic)
is-localic-installed:
ifeq (,$(shell which local-ic))
make get-localic
endif
get-heighliner:
git clone https://github.com/strangelove-ventures/heighliner.git
cd heighliner && go install
# Build a local docker image for the chain with heighliner.
# Requires the `heighliner` binary (see get-heighliner).
local-image:
ifeq (,$(shell which heighliner))
	# BUG FIX: the original echoed `make get-heighliner` inside shell
	# backticks, which is command substitution — printing the "not found"
	# message actually RAN the clone/install. Quote the message instead.
	@echo "'heighliner' binary not found. Consider running 'make get-heighliner'"
else
	heighliner build -c core --local -f chains.yaml
endif
.PHONY: get-heighliner local-image is-localic-installed
###############################################################################
### e2e ###
###############################################################################
ictest-basic:
@echo "Running basic interchain tests"
@cd interchaintest && go test -race -v -run TestBasicChain .
ictest-ibc:
@echo "Running IBC interchain tests"
@cd interchaintest && go test -race -v -run TestIBC .
ictest-wasm:
@echo "Running cosmwasm interchain tests"
@cd interchaintest && go test -race -v -run TestCosmWasmIntegration .
ictest-packetforward:
@echo "Running packet forward middleware interchain tests"
@cd interchaintest && go test -race -v -run TestPacketForwardMiddleware .
ictest-poa:
@echo "Running proof of authority interchain tests"
@cd interchaintest && go test -race -v -run TestPOA .
ictest-tokenfactory:
@echo "Running token factory interchain tests"
@cd interchaintest && go test -race -v -run TestTokenFactory .
###############################################################################
### testnet ###
###############################################################################
setup-testnet: mod-tidy is-localic-installed install local-image set-testnet-configs setup-testnet-keys
# Run this before testnet keys are added
# chainid-1 is used in the testnet.json
set-testnet-configs:
sonrd config set client chain-id chainid-1
sonrd config set client keyring-backend test
sonrd config set client output text
# import keys from testnet.json into test keyring
setup-testnet-keys:
-`echo "decorate bright ozone fork gallery riot bus exhaust worth way bone indoor calm squirrel merry zero scheme cotton until shop any excess stage laundry" | sonrd keys add acc0 --recover`
-`echo "wealth flavor believe regret funny network recall kiss grape useless pepper cram hint member few certain unveil rather brick bargain curious require crowd raise" | sonrd keys add acc1 --recover`
# default testnet is with IBC
testnet: setup-testnet
spawn local-ic start ibc-testnet
testnet-basic: setup-testnet
spawn local-ic start testnet
sh-testnet: mod-tidy
CHAIN_ID="local-1" BLOCK_TIME="1000ms" CLEAN=true sh scripts/test_node.sh
.PHONY: setup-testnet set-testnet-configs testnet testnet-basic sh-testnet

41
README.md Normal file
View File

@ -0,0 +1,41 @@
<div align="center" style="text-align: center;">
<img src="https://pub-97e96d678cb448969765e4c1542e675a.r2.dev/github-sonr.png" width="256" height="256" />
# `sonr` - Sonr Chain
[![Go Reference](https://pkg.go.dev/badge/github.com/didao-org/sonr.svg)](https://pkg.go.dev/github.com/didao-org/sonr)
![GitHub commit activity](https://img.shields.io/github/commit-activity/w/didao-org/sonr)
![GitHub Release Date - Published_At](https://img.shields.io/github/release-date/didao-org/sonr)
[![Static Badge](https://img.shields.io/badge/homepage-sonr.io-blue?style=flat-square)](https://sonr.io)
![Discord](https://img.shields.io/discord/843061375160156170?logo=discord&label=Discord%20Chat)
[![Go Report Card](https://goreportcard.com/badge/github.com/didao-org/sonr)](https://goreportcard.com/report/github.com/didao-org/sonr)
[![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=sonrhq_sonr&metric=security_rating)](https://sonarcloud.io/summary/new_code?id=sonr-io_sonr)
[![Vulnerabilities](https://sonarcloud.io/api/project_badges/measure?project=sonrhq_sonr&metric=vulnerabilities)](https://sonarcloud.io/summary/new_code?id=sonr-io_sonr)
[![Mutable.ai Auto Wiki](https://img.shields.io/badge/Auto_Wiki-Mutable.ai-blue)](https://wiki.mutable.ai/di-dao/sonr)
</div>
<br />
Sonr is a combination of decentralized primitives. Fundamentally, it is a peer-to-peer identity and asset management system that leverages DID documents, Webauthn, and IPFS—providing users with a secure, portable decentralized identity.
<br />
## Acknowledgements
Sonr would not have been possible without the direct and indirect support of the following organizations and individuals:
- **Protocol Labs**: For IPFS & Libp2p.
- **Interchain Foundation**: For Cosmos & IBC.
- **Tim Berners-Lee**: For the Internet.
- **Satoshi Nakamoto**: For Bitcoin.
- **Steve Jobs**: For Taste.
<br />
## Community & Support
- [Forum](https://github.com/onsonr/hway/discussions)
- [Issues](https://github.com/onsonr/hway/issues)
- [Twitter](https://sonr.io/twitter)
- [Dev Chat](https://sonr.io/discord)

26
Taskfile.yml Normal file
View File

@ -0,0 +1,26 @@
version: "3"
tasks:
dev:
cmd: air
install:
cmd: make install
testnet:
cmd: make sh-testnet
proto:
cmd: devbox -q run proto
templ:
cmd: templ generate
build:vltd:
cmd: go build -o ./bin/vltd ./cmd/vltd
build:sonrd:
cmd: go build -o ./bin/sonrd ./cmd/sonrd
build:dwn:
cmd: tinygo build -o ./bin/dwn.wasm -target wasi ./internal/dwn/db.go

View File

@ -0,0 +1,502 @@
// Code generated by protoc-gen-go-pulsar. DO NOT EDIT.
package modulev1
import (
_ "cosmossdk.io/api/cosmos/app/v1alpha1"
fmt "fmt"
runtime "github.com/cosmos/cosmos-proto/runtime"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoiface "google.golang.org/protobuf/runtime/protoiface"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
io "io"
reflect "reflect"
sync "sync"
)
var (
md_Module protoreflect.MessageDescriptor
)
func init() {
file_did_module_v1_module_proto_init()
md_Module = File_did_module_v1_module_proto.Messages().ByName("Module")
}
var _ protoreflect.Message = (*fastReflection_Module)(nil)
type fastReflection_Module Module
func (x *Module) ProtoReflect() protoreflect.Message {
return (*fastReflection_Module)(x)
}
func (x *Module) slowProtoReflect() protoreflect.Message {
mi := &file_did_module_v1_module_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
var _fastReflection_Module_messageType fastReflection_Module_messageType
var _ protoreflect.MessageType = fastReflection_Module_messageType{}

// fastReflection_Module_messageType implements protoreflect.MessageType for
// the fast-reflection wrapper above.
type fastReflection_Module_messageType struct{}

// Zero returns the read-only nil instance of the message.
func (x fastReflection_Module_messageType) Zero() protoreflect.Message {
	return (*fastReflection_Module)(nil)
}

// New returns a freshly allocated, mutable instance of the message.
func (x fastReflection_Module_messageType) New() protoreflect.Message {
	return new(fastReflection_Module)
}

// Descriptor returns the cached message descriptor for Module.
func (x fastReflection_Module_messageType) Descriptor() protoreflect.MessageDescriptor {
	return md_Module
}

// Descriptor returns message descriptor, which contains only the protobuf
// type information for the message.
func (x *fastReflection_Module) Descriptor() protoreflect.MessageDescriptor {
	return md_Module
}

// Type returns the message type, which encapsulates both Go and protobuf
// type information. If the Go type information is not needed,
// it is recommended that the message descriptor be used instead.
func (x *fastReflection_Module) Type() protoreflect.MessageType {
	return _fastReflection_Module_messageType
}

// New returns a newly allocated and mutable empty message.
func (x *fastReflection_Module) New() protoreflect.Message {
	return new(fastReflection_Module)
}

// Interface unwraps the message reflection interface and
// returns the underlying ProtoMessage interface.
func (x *fastReflection_Module) Interface() protoreflect.ProtoMessage {
	return (*Module)(x)
}

// Range iterates over every populated field in an undefined order,
// calling f for each field descriptor and value encountered.
// Range returns immediately if f returns false.
// While iterating, mutating operations may only be performed
// on the current field descriptor.
// Module declares no fields, so this is a no-op.
func (x *fastReflection_Module) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) {
}
// Has reports whether a field is populated.
//
// Some fields have the property of nullability where it is possible to
// distinguish between the default value of a field and whether the field
// was explicitly populated with the default value. Singular message fields,
// member fields of a oneof, and proto2 scalar fields are nullable. Such
// fields are populated only if explicitly set.
//
// In other cases (aside from the nullable cases above),
// a proto3 scalar field is populated if it contains a non-zero value, and
// a repeated field is populated if it is non-empty.
//
// Module declares no fields, so every descriptor reaching this method is
// either an unsupported extension or foreign to this message and panics.
func (x *fastReflection_Module) Has(fd protoreflect.FieldDescriptor) bool {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.did.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.did.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// Clear clears the field such that a subsequent Has call reports false.
//
// Clearing an extension field clears both the extension type and value
// associated with the given field number.
//
// Clear is a mutating operation and unsafe for concurrent use.
// Module has no fields; any descriptor passed here panics.
func (x *fastReflection_Module) Clear(fd protoreflect.FieldDescriptor) {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.did.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.did.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// Get retrieves the value for a field.
//
// For unpopulated scalars, it returns the default value, where
// the default value of a bytes scalar is guaranteed to be a copy.
// For unpopulated composite types, it returns an empty, read-only view
// of the value; to obtain a mutable reference, use Mutable.
// Module has no fields; any descriptor passed here panics.
func (x *fastReflection_Module) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {
	switch descriptor.FullName() {
	default:
		if descriptor.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.did.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.did.module.v1.Module does not contain field %s", descriptor.FullName()))
	}
}

// Set stores the value for a field.
//
// For a field belonging to a oneof, it implicitly clears any other field
// that may be currently set within the same oneof.
// For extension fields, it implicitly stores the provided ExtensionType.
// When setting a composite type, it is unspecified whether the stored value
// aliases the source's memory in any way. If the composite value is an
// empty, read-only value, then it panics.
//
// Set is a mutating operation and unsafe for concurrent use.
// Module has no fields; any descriptor passed here panics.
func (x *fastReflection_Module) Set(fd protoreflect.FieldDescriptor, value protoreflect.Value) {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.did.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.did.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// Mutable returns a mutable reference to a composite type.
//
// If the field is unpopulated, it may allocate a composite value.
// For a field belonging to a oneof, it implicitly clears any other field
// that may be currently set within the same oneof.
// For extension fields, it implicitly stores the provided ExtensionType
// if not already stored.
// It panics if the field does not contain a composite type.
//
// Mutable is a mutating operation and unsafe for concurrent use.
// Module has no fields; any descriptor passed here panics.
func (x *fastReflection_Module) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.did.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.did.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// NewField returns a new value that is assignable to the field
// for the given descriptor. For scalars, this returns the default value.
// For lists, maps, and messages, this returns a new, empty, mutable value.
// Module has no fields; any descriptor passed here panics.
func (x *fastReflection_Module) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.did.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.did.module.v1.Module does not contain field %s", fd.FullName()))
	}
}
// WhichOneof reports which field within the oneof is populated,
// returning nil if none are populated.
// It panics if the oneof descriptor does not belong to this message.
// Module declares no oneofs, so every descriptor panics in the switch.
func (x *fastReflection_Module) WhichOneof(d protoreflect.OneofDescriptor) protoreflect.FieldDescriptor {
	switch d.FullName() {
	default:
		panic(fmt.Errorf("%s is not a oneof field in didao.sonr.did.module.v1.Module", d.FullName()))
	}
	// Generated dead code: the switch above always panics.
	panic("unreachable")
}

// GetUnknown retrieves the entire list of unknown fields.
// The caller may only mutate the contents of the RawFields
// if the mutated bytes are stored back into the message with SetUnknown.
func (x *fastReflection_Module) GetUnknown() protoreflect.RawFields {
	return x.unknownFields
}

// SetUnknown stores an entire list of unknown fields.
// The raw fields must be syntactically valid according to the wire format.
// An implementation may panic if this is not the case.
// Once stored, the caller must not mutate the content of the RawFields.
// An empty RawFields may be passed to clear the fields.
//
// SetUnknown is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_Module) SetUnknown(fields protoreflect.RawFields) {
	x.unknownFields = fields
}

// IsValid reports whether the message is valid.
//
// An invalid message is an empty, read-only value.
//
// An invalid message often corresponds to a nil pointer of the concrete
// message type, but the details are implementation dependent.
// Validity is not part of the protobuf data model, and may not
// be preserved in marshaling or other operations.
func (x *fastReflection_Module) IsValid() bool {
	return x != nil
}
// ProtoMethods returns optional fastReflectionFeature-path implementations of various operations.
// This method may return nil.
//
// The returned methods type is identical to
// "google.golang.org/protobuf/runtime/protoiface".Methods.
// Consult the protoiface package documentation for details.
//
// Module declares no fields, so the size/marshal/unmarshal closures below
// only need to account for unknown fields preserved on the wire.
func (x *fastReflection_Module) ProtoMethods() *protoiface.Methods {
	// size reports the encoded byte size: zero plus any unknown fields.
	size := func(input protoiface.SizeInput) protoiface.SizeOutput {
		x := input.Message.Interface().(*Module)
		if x == nil {
			return protoiface.SizeOutput{
				NoUnkeyedLiterals: input.NoUnkeyedLiterals,
				Size:              0,
			}
		}
		options := runtime.SizeInputToOptions(input)
		_ = options
		var n int
		var l int
		_ = l
		if x.unknownFields != nil {
			n += len(x.unknownFields)
		}
		return protoiface.SizeOutput{
			NoUnkeyedLiterals: input.NoUnkeyedLiterals,
			Size:              n,
		}
	}
	// marshal fills a buffer back-to-front (standard generated-code pattern);
	// only unknown fields are emitted since Module has no declared fields.
	marshal := func(input protoiface.MarshalInput) (protoiface.MarshalOutput, error) {
		x := input.Message.Interface().(*Module)
		if x == nil {
			return protoiface.MarshalOutput{
				NoUnkeyedLiterals: input.NoUnkeyedLiterals,
				Buf:               input.Buf,
			}, nil
		}
		options := runtime.MarshalInputToOptions(input)
		_ = options
		size := options.Size(x)
		dAtA := make([]byte, size)
		i := len(dAtA)
		_ = i
		var l int
		_ = l
		if x.unknownFields != nil {
			i -= len(x.unknownFields)
			copy(dAtA[i:], x.unknownFields)
		}
		if input.Buf != nil {
			input.Buf = append(input.Buf, dAtA...)
		} else {
			input.Buf = dAtA
		}
		return protoiface.MarshalOutput{
			NoUnkeyedLiterals: input.NoUnkeyedLiterals,
			Buf:               input.Buf,
		}, nil
	}
	// unmarshal walks the wire format; every tag is unknown to Module and is
	// either skipped or appended to x.unknownFields per options.DiscardUnknown.
	unmarshal := func(input protoiface.UnmarshalInput) (protoiface.UnmarshalOutput, error) {
		x := input.Message.Interface().(*Module)
		if x == nil {
			return protoiface.UnmarshalOutput{
				NoUnkeyedLiterals: input.NoUnkeyedLiterals,
				Flags:             input.Flags,
			}, nil
		}
		options := runtime.UnmarshalInputToOptions(input)
		_ = options
		dAtA := input.Buf
		l := len(dAtA)
		iNdEx := 0
		for iNdEx < l {
			preIndex := iNdEx
			var wire uint64
			// Decode the varint-encoded tag (field number + wire type).
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, runtime.ErrIntOverflow
				}
				if iNdEx >= l {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				wire |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			fieldNum := int32(wire >> 3)
			wireType := int(wire & 0x7)
			if wireType == 4 {
				return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, fmt.Errorf("proto: Module: wiretype end group for non-group")
			}
			if fieldNum <= 0 {
				return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, fmt.Errorf("proto: Module: illegal tag %d (wire type %d)", fieldNum, wire)
			}
			switch fieldNum {
			default:
				// Unknown field: skip its payload, preserving it unless the
				// caller asked to discard unknowns.
				iNdEx = preIndex
				skippy, err := runtime.Skip(dAtA[iNdEx:])
				if err != nil {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, err
				}
				if (skippy < 0) || (iNdEx+skippy) < 0 {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, runtime.ErrInvalidLength
				}
				if (iNdEx + skippy) > l {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
				}
				if !options.DiscardUnknown {
					x.unknownFields = append(x.unknownFields, dAtA[iNdEx:iNdEx+skippy]...)
				}
				iNdEx += skippy
			}
		}
		if iNdEx > l {
			return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
		}
		return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, nil
	}
	return &protoiface.Methods{
		NoUnkeyedLiterals: struct{}{},
		Flags:             protoiface.SupportMarshalDeterministic | protoiface.SupportUnmarshalDiscardUnknown,
		Size:              size,
		Marshal:           marshal,
		Unmarshal:         unmarshal,
		Merge:             nil,
		CheckInitialized:  nil,
	}
}
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.0
// protoc (unknown)
// source: did/module/v1/module.proto
const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// Module is the app config object of the module.
// Learn more: https://docs.cosmos.network/main/building-modules/depinject
type Module struct {
	state         protoimpl.MessageState // internal reflection/message state
	sizeCache     protoimpl.SizeCache    // cached marshaled size
	unknownFields protoimpl.UnknownFields // unrecognized wire fields, preserved on round-trip
}

// Reset restores x to its zero value and, when unsafe access is enabled,
// re-binds the message info so reflection keeps working on the reset value.
func (x *Module) Reset() {
	*x = Module{}
	if protoimpl.UnsafeEnabled {
		mi := &file_did_module_v1_module_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders x using the standard protobuf text format.
func (x *Module) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Module) ProtoMessage() {}

// Deprecated: Use Module.ProtoReflect.Descriptor instead.
func (*Module) Descriptor() ([]byte, []int) {
	return file_did_module_v1_module_proto_rawDescGZIP(), []int{0}
}
var File_did_module_v1_module_proto protoreflect.FileDescriptor

// file_did_module_v1_module_proto_rawDesc is the serialized FileDescriptorProto
// for did/module/v1/module.proto. Do not edit: the bytes are generated.
var file_did_module_v1_module_proto_rawDesc = []byte{
	0x0a, 0x1a, 0x64, 0x69, 0x64, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x2f,
	0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x18, 0x64, 0x69,
	0x64, 0x61, 0x6f, 0x2e, 0x73, 0x6f, 0x6e, 0x72, 0x2e, 0x64, 0x69, 0x64, 0x2e, 0x6d, 0x6f, 0x64,
	0x75, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x1a, 0x20, 0x63, 0x6f, 0x73, 0x6d, 0x6f, 0x73, 0x2f, 0x61,
	0x70, 0x70, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x6d, 0x6f, 0x64, 0x75,
	0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x28, 0x0a, 0x06, 0x4d, 0x6f, 0x64, 0x75,
	0x6c, 0x65, 0x3a, 0x1e, 0xba, 0xc0, 0x96, 0xda, 0x01, 0x18, 0x0a, 0x16, 0x67, 0x69, 0x74, 0x68,
	0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x69, 0x2d, 0x64, 0x61, 0x6f, 0x2f, 0x73, 0x6f,
	0x6e, 0x72, 0x42, 0xe3, 0x01, 0x0a, 0x1c, 0x63, 0x6f, 0x6d, 0x2e, 0x64, 0x69, 0x64, 0x61, 0x6f,
	0x2e, 0x73, 0x6f, 0x6e, 0x72, 0x2e, 0x64, 0x69, 0x64, 0x2e, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65,
	0x2e, 0x76, 0x31, 0x42, 0x0b, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f,
	0x50, 0x01, 0x5a, 0x31, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64,
	0x69, 0x2d, 0x64, 0x61, 0x6f, 0x2f, 0x73, 0x6f, 0x6e, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x64,
	0x69, 0x64, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x3b, 0x6d, 0x6f, 0x64,
	0x75, 0x6c, 0x65, 0x76, 0x31, 0xa2, 0x02, 0x04, 0x44, 0x53, 0x44, 0x4d, 0xaa, 0x02, 0x18, 0x44,
	0x69, 0x64, 0x61, 0x6f, 0x2e, 0x53, 0x6f, 0x6e, 0x72, 0x2e, 0x44, 0x69, 0x64, 0x2e, 0x4d, 0x6f,
	0x64, 0x75, 0x6c, 0x65, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x18, 0x44, 0x69, 0x64, 0x61, 0x6f, 0x5c,
	0x53, 0x6f, 0x6e, 0x72, 0x5c, 0x44, 0x69, 0x64, 0x5c, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5c,
	0x56, 0x31, 0xe2, 0x02, 0x24, 0x44, 0x69, 0x64, 0x61, 0x6f, 0x5c, 0x53, 0x6f, 0x6e, 0x72, 0x5c,
	0x44, 0x69, 0x64, 0x5c, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50,
	0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1c, 0x44, 0x69, 0x64, 0x61,
	0x6f, 0x3a, 0x3a, 0x53, 0x6f, 0x6e, 0x72, 0x3a, 0x3a, 0x44, 0x69, 0x64, 0x3a, 0x3a, 0x4d, 0x6f,
	0x64, 0x75, 0x6c, 0x65, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}

// rawDescOnce guards the one-time GZIP compression of the raw descriptor.
var (
	file_did_module_v1_module_proto_rawDescOnce sync.Once
	file_did_module_v1_module_proto_rawDescData = file_did_module_v1_module_proto_rawDesc
)

// file_did_module_v1_module_proto_rawDescGZIP lazily compresses the raw
// descriptor once and returns the cached compressed bytes thereafter.
func file_did_module_v1_module_proto_rawDescGZIP() []byte {
	file_did_module_v1_module_proto_rawDescOnce.Do(func() {
		file_did_module_v1_module_proto_rawDescData = protoimpl.X.CompressGZIP(file_did_module_v1_module_proto_rawDescData)
	})
	return file_did_module_v1_module_proto_rawDescData
}

var file_did_module_v1_module_proto_msgTypes = make([]protoimpl.MessageInfo, 1)

// goTypes maps descriptor indices to the Go types declared in this file.
var file_did_module_v1_module_proto_goTypes = []interface{}{
	(*Module)(nil), // 0: didao.sonr.did.module.v1.Module
}

// depIdxs encodes cross-references between descriptor entries (all empty
// here since Module has no fields, extensions, or services).
var file_did_module_v1_module_proto_depIdxs = []int32{
	0, // [0:0] is the sub-list for method output_type
	0, // [0:0] is the sub-list for method input_type
	0, // [0:0] is the sub-list for extension type_name
	0, // [0:0] is the sub-list for extension extendee
	0, // [0:0] is the sub-list for field type_name
}
func init() { file_did_module_v1_module_proto_init() }

// file_did_module_v1_module_proto_init builds and registers the file
// descriptor and message types exactly once; later calls are no-ops.
func file_did_module_v1_module_proto_init() {
	if File_did_module_v1_module_proto != nil {
		return
	}
	if !protoimpl.UnsafeEnabled {
		// Exporter lets the reflection runtime reach unexported struct
		// fields when unsafe-pointer access is unavailable.
		file_did_module_v1_module_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Module); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_did_module_v1_module_proto_rawDesc,
			NumEnums:      0,
			NumMessages:   1,
			NumExtensions: 0,
			NumServices:   0,
		},
		GoTypes:           file_did_module_v1_module_proto_goTypes,
		DependencyIndexes: file_did_module_v1_module_proto_depIdxs,
		MessageInfos:      file_did_module_v1_module_proto_msgTypes,
	}.Build()
	File_did_module_v1_module_proto = out.File
	// Release builder inputs so they can be garbage-collected.
	file_did_module_v1_module_proto_rawDesc = nil
	file_did_module_v1_module_proto_goTypes = nil
	file_did_module_v1_module_proto_depIdxs = nil
}

2559
api/did/v1/account.pulsar.go Normal file

File diff suppressed because it is too large Load Diff

1991
api/did/v1/genesis.pulsar.go Normal file

File diff suppressed because it is too large Load Diff

5021
api/did/v1/query.pulsar.go Normal file

File diff suppressed because it is too large Load Diff

267
api/did/v1/query_grpc.pb.go Normal file
View File

@ -0,0 +1,267 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.3.0
// - protoc (unknown)
// source: did/v1/query.proto
package didv1
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7

// Fully-qualified method names for the did.v1.Query service, used by both
// the client Invoke calls and the server handlers below.
const (
	Query_Params_FullMethodName            = "/did.v1.Query/Params"
	Query_PropertyExists_FullMethodName    = "/did.v1.Query/PropertyExists"
	Query_ResolveIdentifier_FullMethodName = "/did.v1.Query/ResolveIdentifier"
	Query_LoginOptions_FullMethodName      = "/did.v1.Query/LoginOptions"
	Query_RegisterOptions_FullMethodName   = "/did.v1.Query/RegisterOptions"
)

// QueryClient is the client API for Query service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type QueryClient interface {
	// Params queries all parameters of the module.
	Params(ctx context.Context, in *QueryParamsRequest, opts ...grpc.CallOption) (*QueryParamsResponse, error)
	// Exists queries if an id exists.
	PropertyExists(ctx context.Context, in *QueryExistsRequest, opts ...grpc.CallOption) (*QueryExistsResponse, error)
	// Resolve queries the DID document by its id.
	ResolveIdentifier(ctx context.Context, in *QueryResolveRequest, opts ...grpc.CallOption) (*QueryResolveResponse, error)
	// LoginOptions queries the PublicKeyCredentialAttestationOptions for starting a login flow.
	LoginOptions(ctx context.Context, in *QueryLoginOptionsRequest, opts ...grpc.CallOption) (*QueryLoginOptionsResponse, error)
	// RegisterOptions queries the PublicKeyCredentialCreationOptions for starting a register flow.
	RegisterOptions(ctx context.Context, in *QueryRegisterOptionsRequest, opts ...grpc.CallOption) (*QueryRegisterOptionsResponse, error)
}

// queryClient is the concrete QueryClient backed by a grpc connection.
type queryClient struct {
	cc grpc.ClientConnInterface
}

// NewQueryClient wraps cc in a QueryClient for the did.v1.Query service.
func NewQueryClient(cc grpc.ClientConnInterface) QueryClient {
	return &queryClient{cc}
}
// Params queries all parameters of the module via a unary RPC.
func (c *queryClient) Params(ctx context.Context, in *QueryParamsRequest, opts ...grpc.CallOption) (*QueryParamsResponse, error) {
	resp := new(QueryParamsResponse)
	if err := c.cc.Invoke(ctx, Query_Params_FullMethodName, in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

// PropertyExists queries whether an id exists via a unary RPC.
func (c *queryClient) PropertyExists(ctx context.Context, in *QueryExistsRequest, opts ...grpc.CallOption) (*QueryExistsResponse, error) {
	resp := new(QueryExistsResponse)
	if err := c.cc.Invoke(ctx, Query_PropertyExists_FullMethodName, in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

// ResolveIdentifier queries the DID document by its id via a unary RPC.
func (c *queryClient) ResolveIdentifier(ctx context.Context, in *QueryResolveRequest, opts ...grpc.CallOption) (*QueryResolveResponse, error) {
	resp := new(QueryResolveResponse)
	if err := c.cc.Invoke(ctx, Query_ResolveIdentifier_FullMethodName, in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

// LoginOptions queries the options for starting a login flow via a unary RPC.
func (c *queryClient) LoginOptions(ctx context.Context, in *QueryLoginOptionsRequest, opts ...grpc.CallOption) (*QueryLoginOptionsResponse, error) {
	resp := new(QueryLoginOptionsResponse)
	if err := c.cc.Invoke(ctx, Query_LoginOptions_FullMethodName, in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

// RegisterOptions queries the options for starting a register flow via a unary RPC.
func (c *queryClient) RegisterOptions(ctx context.Context, in *QueryRegisterOptionsRequest, opts ...grpc.CallOption) (*QueryRegisterOptionsResponse, error) {
	resp := new(QueryRegisterOptionsResponse)
	if err := c.cc.Invoke(ctx, Query_RegisterOptions_FullMethodName, in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// QueryServer is the server API for Query service.
// All implementations must embed UnimplementedQueryServer
// for forward compatibility
type QueryServer interface {
	// Params queries all parameters of the module.
	Params(context.Context, *QueryParamsRequest) (*QueryParamsResponse, error)
	// Exists queries if an id exists.
	PropertyExists(context.Context, *QueryExistsRequest) (*QueryExistsResponse, error)
	// Resolve queries the DID document by its id.
	ResolveIdentifier(context.Context, *QueryResolveRequest) (*QueryResolveResponse, error)
	// LoginOptions queries the PublicKeyCredentialAttestationOptions for starting a login flow.
	LoginOptions(context.Context, *QueryLoginOptionsRequest) (*QueryLoginOptionsResponse, error)
	// RegisterOptions queries the PublicKeyCredentialCreationOptions for starting a register flow.
	RegisterOptions(context.Context, *QueryRegisterOptionsRequest) (*QueryRegisterOptionsResponse, error)
	mustEmbedUnimplementedQueryServer()
}

// UnimplementedQueryServer must be embedded to have forward compatible implementations.
// Each method returns codes.Unimplemented until overridden.
type UnimplementedQueryServer struct {
}

func (UnimplementedQueryServer) Params(context.Context, *QueryParamsRequest) (*QueryParamsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Params not implemented")
}
func (UnimplementedQueryServer) PropertyExists(context.Context, *QueryExistsRequest) (*QueryExistsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method PropertyExists not implemented")
}
func (UnimplementedQueryServer) ResolveIdentifier(context.Context, *QueryResolveRequest) (*QueryResolveResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ResolveIdentifier not implemented")
}
func (UnimplementedQueryServer) LoginOptions(context.Context, *QueryLoginOptionsRequest) (*QueryLoginOptionsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method LoginOptions not implemented")
}
func (UnimplementedQueryServer) RegisterOptions(context.Context, *QueryRegisterOptionsRequest) (*QueryRegisterOptionsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method RegisterOptions not implemented")
}
func (UnimplementedQueryServer) mustEmbedUnimplementedQueryServer() {}

// UnsafeQueryServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to QueryServer will
// result in compilation errors.
type UnsafeQueryServer interface {
	mustEmbedUnimplementedQueryServer()
}

// RegisterQueryServer registers srv's handlers with the grpc registrar.
func RegisterQueryServer(s grpc.ServiceRegistrar, srv QueryServer) {
	s.RegisterService(&Query_ServiceDesc, srv)
}
// _Query_Params_Handler decodes the request and dispatches Params, running
// the server interceptor chain when one is installed.
func _Query_Params_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(QueryParamsRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(QueryServer).Params(ctx, req)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: Query_Params_FullMethodName}
	invoke := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(QueryServer).Params(ctx, r.(*QueryParamsRequest))
	}
	return interceptor(ctx, req, info, invoke)
}

// _Query_PropertyExists_Handler decodes the request and dispatches
// PropertyExists, running the interceptor chain when one is installed.
func _Query_PropertyExists_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(QueryExistsRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(QueryServer).PropertyExists(ctx, req)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: Query_PropertyExists_FullMethodName}
	invoke := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(QueryServer).PropertyExists(ctx, r.(*QueryExistsRequest))
	}
	return interceptor(ctx, req, info, invoke)
}

// _Query_ResolveIdentifier_Handler decodes the request and dispatches
// ResolveIdentifier, running the interceptor chain when one is installed.
func _Query_ResolveIdentifier_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(QueryResolveRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(QueryServer).ResolveIdentifier(ctx, req)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: Query_ResolveIdentifier_FullMethodName}
	invoke := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(QueryServer).ResolveIdentifier(ctx, r.(*QueryResolveRequest))
	}
	return interceptor(ctx, req, info, invoke)
}

// _Query_LoginOptions_Handler decodes the request and dispatches
// LoginOptions, running the interceptor chain when one is installed.
func _Query_LoginOptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(QueryLoginOptionsRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(QueryServer).LoginOptions(ctx, req)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: Query_LoginOptions_FullMethodName}
	invoke := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(QueryServer).LoginOptions(ctx, r.(*QueryLoginOptionsRequest))
	}
	return interceptor(ctx, req, info, invoke)
}

// _Query_RegisterOptions_Handler decodes the request and dispatches
// RegisterOptions, running the interceptor chain when one is installed.
func _Query_RegisterOptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(QueryRegisterOptionsRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(QueryServer).RegisterOptions(ctx, req)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: Query_RegisterOptions_FullMethodName}
	invoke := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(QueryServer).RegisterOptions(ctx, r.(*QueryRegisterOptionsRequest))
	}
	return interceptor(ctx, req, info, invoke)
}
// Query_ServiceDesc is the grpc.ServiceDesc for Query service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Query_ServiceDesc = grpc.ServiceDesc{
	ServiceName: "did.v1.Query",
	HandlerType: (*QueryServer)(nil),
	// All five RPCs are unary; each handler pairs a method name with its
	// generated dispatch function above.
	Methods: []grpc.MethodDesc{
		{
			MethodName: "Params",
			Handler:    _Query_Params_Handler,
		},
		{
			MethodName: "PropertyExists",
			Handler:    _Query_PropertyExists_Handler,
		},
		{
			MethodName: "ResolveIdentifier",
			Handler:    _Query_ResolveIdentifier_Handler,
		},
		{
			MethodName: "LoginOptions",
			Handler:    _Query_LoginOptions_Handler,
		},
		{
			MethodName: "RegisterOptions",
			Handler:    _Query_RegisterOptions_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "did/v1/query.proto",
}

View File

@ -0,0 +1,531 @@
// Code generated by protoc-gen-go-cosmos-orm. DO NOT EDIT.
package didv1
import (
context "context"
ormlist "cosmossdk.io/orm/model/ormlist"
ormtable "cosmossdk.io/orm/model/ormtable"
ormerrors "cosmossdk.io/orm/types/ormerrors"
)
// AttestationTable is the generated ORM accessor interface for the
// Attestation message, keyed by its string id primary key.
type AttestationTable interface {
	Insert(ctx context.Context, attestation *Attestation) error
	Update(ctx context.Context, attestation *Attestation) error
	Save(ctx context.Context, attestation *Attestation) error
	Delete(ctx context.Context, attestation *Attestation) error
	Has(ctx context.Context, id string) (found bool, err error)
	// Get returns nil and an error which responds true to ormerrors.IsNotFound() if the record was not found.
	Get(ctx context.Context, id string) (*Attestation, error)
	List(ctx context.Context, prefixKey AttestationIndexKey, opts ...ormlist.Option) (AttestationIterator, error)
	ListRange(ctx context.Context, from, to AttestationIndexKey, opts ...ormlist.Option) (AttestationIterator, error)
	DeleteBy(ctx context.Context, prefixKey AttestationIndexKey) error
	DeleteRange(ctx context.Context, from, to AttestationIndexKey) error

	doNotImplement()
}

// AttestationIterator wraps an ormtable iterator with typed Value access.
type AttestationIterator struct {
	ormtable.Iterator
}

// Value unmarshals the record at the iterator's current position.
func (i AttestationIterator) Value() (*Attestation, error) {
	var attestation Attestation
	err := i.UnmarshalMessage(&attestation)
	return &attestation, err
}

// AttestationIndexKey identifies an index (by id) and a prefix of its
// key values for List/Delete operations.
type AttestationIndexKey interface {
	id() uint32
	values() []interface{}
	attestationIndexKey()
}

// primary key starting index..
type AttestationPrimaryKey = AttestationIdIndexKey

// AttestationIdIndexKey is the index key for the id primary key (index 0).
type AttestationIdIndexKey struct {
	vs []interface{}
}

func (x AttestationIdIndexKey) id() uint32            { return 0 }
func (x AttestationIdIndexKey) values() []interface{} { return x.vs }
func (x AttestationIdIndexKey) attestationIndexKey()  {}

// WithId returns a copy of the key with its id value set.
func (this AttestationIdIndexKey) WithId(id string) AttestationIdIndexKey {
	this.vs = []interface{}{id}
	return this
}
// attestationTable implements AttestationTable by delegating every
// operation to the underlying ormtable.Table.
type attestationTable struct {
	table ormtable.Table
}

// Insert delegates to the underlying table's Insert.
func (t attestationTable) Insert(ctx context.Context, attestation *Attestation) error {
	return t.table.Insert(ctx, attestation)
}

// Update delegates to the underlying table's Update.
func (t attestationTable) Update(ctx context.Context, attestation *Attestation) error {
	return t.table.Update(ctx, attestation)
}

// Save delegates to the underlying table's Save.
func (t attestationTable) Save(ctx context.Context, attestation *Attestation) error {
	return t.table.Save(ctx, attestation)
}

// Delete delegates to the underlying table's Delete.
func (t attestationTable) Delete(ctx context.Context, attestation *Attestation) error {
	return t.table.Delete(ctx, attestation)
}

// Has checks the primary-key index for the given id.
func (t attestationTable) Has(ctx context.Context, id string) (found bool, err error) {
	return t.table.PrimaryKey().Has(ctx, id)
}

// Get fetches the record by primary key, translating a miss into
// ormerrors.NotFound.
func (t attestationTable) Get(ctx context.Context, id string) (*Attestation, error) {
	attestation := new(Attestation)
	found, err := t.table.PrimaryKey().Get(ctx, attestation, id)
	switch {
	case err != nil:
		return nil, err
	case !found:
		return nil, ormerrors.NotFound
	}
	return attestation, nil
}

// List iterates the index chosen by prefixKey over the given prefix values.
func (t attestationTable) List(ctx context.Context, prefixKey AttestationIndexKey, opts ...ormlist.Option) (AttestationIterator, error) {
	it, err := t.table.GetIndexByID(prefixKey.id()).List(ctx, prefixKey.values(), opts...)
	return AttestationIterator{it}, err
}

// ListRange iterates the index chosen by from over the [from, to] key range.
func (t attestationTable) ListRange(ctx context.Context, from, to AttestationIndexKey, opts ...ormlist.Option) (AttestationIterator, error) {
	it, err := t.table.GetIndexByID(from.id()).ListRange(ctx, from.values(), to.values(), opts...)
	return AttestationIterator{it}, err
}

// DeleteBy removes all records matching the prefix key.
func (t attestationTable) DeleteBy(ctx context.Context, prefixKey AttestationIndexKey) error {
	return t.table.GetIndexByID(prefixKey.id()).DeleteBy(ctx, prefixKey.values()...)
}

// DeleteRange removes all records in the [from, to] key range.
func (t attestationTable) DeleteRange(ctx context.Context, from, to AttestationIndexKey) error {
	return t.table.GetIndexByID(from.id()).DeleteRange(ctx, from.values(), to.values())
}

func (t attestationTable) doNotImplement() {}

var _ AttestationTable = attestationTable{}

// NewAttestationTable resolves the Attestation table from the schema,
// returning ormerrors.TableNotFound if it was not registered.
func NewAttestationTable(db ormtable.Schema) (AttestationTable, error) {
	table := db.GetTable(&Attestation{})
	if table == nil {
		msgName := (&Attestation{}).ProtoReflect().Descriptor().FullName()
		return nil, ormerrors.TableNotFound.Wrap(string(msgName))
	}
	return attestationTable{table: table}, nil
}
// ControllerTable is the generated ORM accessor interface for the
// Controller message, keyed by its string id primary key.
type ControllerTable interface {
	Insert(ctx context.Context, controller *Controller) error
	Update(ctx context.Context, controller *Controller) error
	Save(ctx context.Context, controller *Controller) error
	Delete(ctx context.Context, controller *Controller) error
	Has(ctx context.Context, id string) (found bool, err error)
	// Get returns nil and an error which responds true to ormerrors.IsNotFound() if the record was not found.
	Get(ctx context.Context, id string) (*Controller, error)
	List(ctx context.Context, prefixKey ControllerIndexKey, opts ...ormlist.Option) (ControllerIterator, error)
	ListRange(ctx context.Context, from, to ControllerIndexKey, opts ...ormlist.Option) (ControllerIterator, error)
	DeleteBy(ctx context.Context, prefixKey ControllerIndexKey) error
	DeleteRange(ctx context.Context, from, to ControllerIndexKey) error

	doNotImplement()
}

// ControllerIterator wraps an ormtable iterator with typed Value access.
type ControllerIterator struct {
	ormtable.Iterator
}

// Value unmarshals the record at the iterator's current position.
func (i ControllerIterator) Value() (*Controller, error) {
	var controller Controller
	err := i.UnmarshalMessage(&controller)
	return &controller, err
}

// ControllerIndexKey identifies an index (by id) and a prefix of its
// key values for List/Delete operations.
type ControllerIndexKey interface {
	id() uint32
	values() []interface{}
	controllerIndexKey()
}

// primary key starting index..
type ControllerPrimaryKey = ControllerIdIndexKey

// ControllerIdIndexKey is the index key for the id primary key (index 0).
type ControllerIdIndexKey struct {
	vs []interface{}
}

func (x ControllerIdIndexKey) id() uint32            { return 0 }
func (x ControllerIdIndexKey) values() []interface{} { return x.vs }
func (x ControllerIdIndexKey) controllerIndexKey()   {}

// WithId returns a copy of the key with its id value set.
func (this ControllerIdIndexKey) WithId(id string) ControllerIdIndexKey {
	this.vs = []interface{}{id}
	return this
}
type controllerTable struct {
table ormtable.Table
}
func (this controllerTable) Insert(ctx context.Context, controller *Controller) error {
return this.table.Insert(ctx, controller)
}
func (this controllerTable) Update(ctx context.Context, controller *Controller) error {
return this.table.Update(ctx, controller)
}
func (this controllerTable) Save(ctx context.Context, controller *Controller) error {
return this.table.Save(ctx, controller)
}
func (this controllerTable) Delete(ctx context.Context, controller *Controller) error {
return this.table.Delete(ctx, controller)
}
func (this controllerTable) Has(ctx context.Context, id string) (found bool, err error) {
return this.table.PrimaryKey().Has(ctx, id)
}
func (this controllerTable) Get(ctx context.Context, id string) (*Controller, error) {
var controller Controller
found, err := this.table.PrimaryKey().Get(ctx, &controller, id)
if err != nil {
return nil, err
}
if !found {
return nil, ormerrors.NotFound
}
return &controller, nil
}
func (this controllerTable) List(ctx context.Context, prefixKey ControllerIndexKey, opts ...ormlist.Option) (ControllerIterator, error) {
it, err := this.table.GetIndexByID(prefixKey.id()).List(ctx, prefixKey.values(), opts...)
return ControllerIterator{it}, err
}
func (this controllerTable) ListRange(ctx context.Context, from, to ControllerIndexKey, opts ...ormlist.Option) (ControllerIterator, error) {
it, err := this.table.GetIndexByID(from.id()).ListRange(ctx, from.values(), to.values(), opts...)
return ControllerIterator{it}, err
}
func (this controllerTable) DeleteBy(ctx context.Context, prefixKey ControllerIndexKey) error {
return this.table.GetIndexByID(prefixKey.id()).DeleteBy(ctx, prefixKey.values()...)
}
func (this controllerTable) DeleteRange(ctx context.Context, from, to ControllerIndexKey) error {
return this.table.GetIndexByID(from.id()).DeleteRange(ctx, from.values(), to.values())
}
func (this controllerTable) doNotImplement() {}
var _ ControllerTable = controllerTable{}
func NewControllerTable(db ormtable.Schema) (ControllerTable, error) {
table := db.GetTable(&Controller{})
if table == nil {
return nil, ormerrors.TableNotFound.Wrap(string((&Controller{}).ProtoReflect().Descriptor().FullName()))
}
return controllerTable{table}, nil
}
// DelegationTable provides typed CRUD and index-query access to Delegation rows.
type DelegationTable interface {
	Insert(ctx context.Context, delegation *Delegation) error
	Update(ctx context.Context, delegation *Delegation) error
	Save(ctx context.Context, delegation *Delegation) error
	Delete(ctx context.Context, delegation *Delegation) error
	Has(ctx context.Context, id string) (found bool, err error)
	// Get returns nil and an error which responds true to ormerrors.IsNotFound() if the record was not found.
	Get(ctx context.Context, id string) (*Delegation, error)
	List(ctx context.Context, prefixKey DelegationIndexKey, opts ...ormlist.Option) (DelegationIterator, error)
	ListRange(ctx context.Context, from, to DelegationIndexKey, opts ...ormlist.Option) (DelegationIterator, error)
	DeleteBy(ctx context.Context, prefixKey DelegationIndexKey) error
	DeleteRange(ctx context.Context, from, to DelegationIndexKey) error
	doNotImplement()
}

// DelegationIterator wraps an ormtable.Iterator with a typed Value accessor.
type DelegationIterator struct {
	ormtable.Iterator
}

// Value decodes the row at the iterator's current position into a Delegation.
func (i DelegationIterator) Value() (*Delegation, error) {
	var delegation Delegation
	err := i.UnmarshalMessage(&delegation)
	return &delegation, err
}

// DelegationIndexKey identifies a Delegation table index and the key values
// used to query it.
type DelegationIndexKey interface {
	id() uint32
	values() []interface{}
	delegationIndexKey()
}

// primary key starting index..
type DelegationPrimaryKey = DelegationIdIndexKey

// DelegationIdIndexKey is the index key for the primary key index (index id 0).
type DelegationIdIndexKey struct {
	vs []interface{}
}

func (x DelegationIdIndexKey) id() uint32 { return 0 }
func (x DelegationIdIndexKey) values() []interface{} { return x.vs }
func (x DelegationIdIndexKey) delegationIndexKey() {}

// WithId returns a copy of the key populated with the given id value.
func (this DelegationIdIndexKey) WithId(id string) DelegationIdIndexKey {
	this.vs = []interface{}{id}
	return this
}

// delegationTable implements DelegationTable on top of an ormtable.Table.
type delegationTable struct {
	table ormtable.Table
}

func (this delegationTable) Insert(ctx context.Context, delegation *Delegation) error {
	return this.table.Insert(ctx, delegation)
}
func (this delegationTable) Update(ctx context.Context, delegation *Delegation) error {
	return this.table.Update(ctx, delegation)
}
func (this delegationTable) Save(ctx context.Context, delegation *Delegation) error {
	return this.table.Save(ctx, delegation)
}
func (this delegationTable) Delete(ctx context.Context, delegation *Delegation) error {
	return this.table.Delete(ctx, delegation)
}

// Has reports whether a Delegation with the given primary key id exists.
func (this delegationTable) Has(ctx context.Context, id string) (found bool, err error) {
	return this.table.PrimaryKey().Has(ctx, id)
}

// Get retrieves the Delegation with the given primary key id, returning
// ormerrors.NotFound when no matching row exists.
func (this delegationTable) Get(ctx context.Context, id string) (*Delegation, error) {
	var delegation Delegation
	found, err := this.table.PrimaryKey().Get(ctx, &delegation, id)
	if err != nil {
		return nil, err
	}
	if !found {
		return nil, ormerrors.NotFound
	}
	return &delegation, nil
}

// List returns an iterator over rows matching the index prefix key.
func (this delegationTable) List(ctx context.Context, prefixKey DelegationIndexKey, opts ...ormlist.Option) (DelegationIterator, error) {
	it, err := this.table.GetIndexByID(prefixKey.id()).List(ctx, prefixKey.values(), opts...)
	return DelegationIterator{it}, err
}

// ListRange returns an iterator over rows between the from and to index keys.
func (this delegationTable) ListRange(ctx context.Context, from, to DelegationIndexKey, opts ...ormlist.Option) (DelegationIterator, error) {
	it, err := this.table.GetIndexByID(from.id()).ListRange(ctx, from.values(), to.values(), opts...)
	return DelegationIterator{it}, err
}

// DeleteBy deletes rows matching the index prefix key.
func (this delegationTable) DeleteBy(ctx context.Context, prefixKey DelegationIndexKey) error {
	return this.table.GetIndexByID(prefixKey.id()).DeleteBy(ctx, prefixKey.values()...)
}

// DeleteRange deletes rows between the from and to index keys.
func (this delegationTable) DeleteRange(ctx context.Context, from, to DelegationIndexKey) error {
	return this.table.GetIndexByID(from.id()).DeleteRange(ctx, from.values(), to.values())
}

// doNotImplement prevents implementations of DelegationTable outside this package.
func (this delegationTable) doNotImplement() {}

// Compile-time check that delegationTable satisfies DelegationTable.
var _ DelegationTable = delegationTable{}

// NewDelegationTable binds a DelegationTable to the Delegation table in the
// given ORM schema, or fails with ormerrors.TableNotFound.
func NewDelegationTable(db ormtable.Schema) (DelegationTable, error) {
	table := db.GetTable(&Delegation{})
	if table == nil {
		return nil, ormerrors.TableNotFound.Wrap(string((&Delegation{}).ProtoReflect().Descriptor().FullName()))
	}
	return delegationTable{table}, nil
}
// ServiceTable provides typed CRUD and index-query access to Service rows.
type ServiceTable interface {
	Insert(ctx context.Context, service *Service) error
	Update(ctx context.Context, service *Service) error
	Save(ctx context.Context, service *Service) error
	Delete(ctx context.Context, service *Service) error
	Has(ctx context.Context, id string) (found bool, err error)
	// Get returns nil and an error which responds true to ormerrors.IsNotFound() if the record was not found.
	Get(ctx context.Context, id string) (*Service, error)
	List(ctx context.Context, prefixKey ServiceIndexKey, opts ...ormlist.Option) (ServiceIterator, error)
	ListRange(ctx context.Context, from, to ServiceIndexKey, opts ...ormlist.Option) (ServiceIterator, error)
	DeleteBy(ctx context.Context, prefixKey ServiceIndexKey) error
	DeleteRange(ctx context.Context, from, to ServiceIndexKey) error
	doNotImplement()
}

// ServiceIterator wraps an ormtable.Iterator with a typed Value accessor.
type ServiceIterator struct {
	ormtable.Iterator
}

// Value decodes the row at the iterator's current position into a Service.
func (i ServiceIterator) Value() (*Service, error) {
	var service Service
	err := i.UnmarshalMessage(&service)
	return &service, err
}

// ServiceIndexKey identifies a Service table index and the key values used
// to query it.
type ServiceIndexKey interface {
	id() uint32
	values() []interface{}
	serviceIndexKey()
}

// primary key starting index..
type ServicePrimaryKey = ServiceIdIndexKey

// ServiceIdIndexKey is the index key for the primary key index (index id 0).
type ServiceIdIndexKey struct {
	vs []interface{}
}

func (x ServiceIdIndexKey) id() uint32 { return 0 }
func (x ServiceIdIndexKey) values() []interface{} { return x.vs }
func (x ServiceIdIndexKey) serviceIndexKey() {}

// WithId returns a copy of the key populated with the given id value.
func (this ServiceIdIndexKey) WithId(id string) ServiceIdIndexKey {
	this.vs = []interface{}{id}
	return this
}

// serviceTable implements ServiceTable on top of an ormtable.Table.
type serviceTable struct {
	table ormtable.Table
}

func (this serviceTable) Insert(ctx context.Context, service *Service) error {
	return this.table.Insert(ctx, service)
}
func (this serviceTable) Update(ctx context.Context, service *Service) error {
	return this.table.Update(ctx, service)
}
func (this serviceTable) Save(ctx context.Context, service *Service) error {
	return this.table.Save(ctx, service)
}
func (this serviceTable) Delete(ctx context.Context, service *Service) error {
	return this.table.Delete(ctx, service)
}

// Has reports whether a Service with the given primary key id exists.
func (this serviceTable) Has(ctx context.Context, id string) (found bool, err error) {
	return this.table.PrimaryKey().Has(ctx, id)
}

// Get retrieves the Service with the given primary key id, returning
// ormerrors.NotFound when no matching row exists.
func (this serviceTable) Get(ctx context.Context, id string) (*Service, error) {
	var service Service
	found, err := this.table.PrimaryKey().Get(ctx, &service, id)
	if err != nil {
		return nil, err
	}
	if !found {
		return nil, ormerrors.NotFound
	}
	return &service, nil
}

// List returns an iterator over rows matching the index prefix key.
func (this serviceTable) List(ctx context.Context, prefixKey ServiceIndexKey, opts ...ormlist.Option) (ServiceIterator, error) {
	it, err := this.table.GetIndexByID(prefixKey.id()).List(ctx, prefixKey.values(), opts...)
	return ServiceIterator{it}, err
}

// ListRange returns an iterator over rows between the from and to index keys.
func (this serviceTable) ListRange(ctx context.Context, from, to ServiceIndexKey, opts ...ormlist.Option) (ServiceIterator, error) {
	it, err := this.table.GetIndexByID(from.id()).ListRange(ctx, from.values(), to.values(), opts...)
	return ServiceIterator{it}, err
}

// DeleteBy deletes rows matching the index prefix key.
func (this serviceTable) DeleteBy(ctx context.Context, prefixKey ServiceIndexKey) error {
	return this.table.GetIndexByID(prefixKey.id()).DeleteBy(ctx, prefixKey.values()...)
}

// DeleteRange deletes rows between the from and to index keys.
func (this serviceTable) DeleteRange(ctx context.Context, from, to ServiceIndexKey) error {
	return this.table.GetIndexByID(from.id()).DeleteRange(ctx, from.values(), to.values())
}

// doNotImplement prevents implementations of ServiceTable outside this package.
func (this serviceTable) doNotImplement() {}

// Compile-time check that serviceTable satisfies ServiceTable.
var _ ServiceTable = serviceTable{}

// NewServiceTable binds a ServiceTable to the Service table in the given ORM
// schema, or fails with ormerrors.TableNotFound.
func NewServiceTable(db ormtable.Schema) (ServiceTable, error) {
	table := db.GetTable(&Service{})
	if table == nil {
		return nil, ormerrors.TableNotFound.Wrap(string((&Service{}).ProtoReflect().Descriptor().FullName()))
	}
	return serviceTable{table}, nil
}
// StateStore aggregates this module's ORM table accessors behind one interface.
type StateStore interface {
	AttestationTable() AttestationTable
	ControllerTable() ControllerTable
	DelegationTable() DelegationTable
	ServiceTable() ServiceTable
	doNotImplement()
}

// stateStore is the concrete StateStore, holding one handle per table.
type stateStore struct {
	attestation AttestationTable
	controller ControllerTable
	delegation DelegationTable
	service ServiceTable
}

// AttestationTable returns the Attestation table handle.
func (x stateStore) AttestationTable() AttestationTable {
	return x.attestation
}

// ControllerTable returns the Controller table handle.
func (x stateStore) ControllerTable() ControllerTable {
	return x.controller
}

// DelegationTable returns the Delegation table handle.
func (x stateStore) DelegationTable() DelegationTable {
	return x.delegation
}

// ServiceTable returns the Service table handle.
func (x stateStore) ServiceTable() ServiceTable {
	return x.service
}

// doNotImplement prevents implementations of StateStore outside this package.
func (stateStore) doNotImplement() {}

// Compile-time check that stateStore satisfies StateStore.
var _ StateStore = stateStore{}

// NewStateStore constructs a handle for every table in the schema, failing
// if any of the module's tables is missing from it.
func NewStateStore(db ormtable.Schema) (StateStore, error) {
	attestationTable, err := NewAttestationTable(db)
	if err != nil {
		return nil, err
	}
	controllerTable, err := NewControllerTable(db)
	if err != nil {
		return nil, err
	}
	delegationTable, err := NewDelegationTable(db)
	if err != nil {
		return nil, err
	}
	serviceTable, err := NewServiceTable(db)
	if err != nil {
		return nil, err
	}
	return stateStore{
		attestationTable,
		controllerTable,
		delegationTable,
		serviceTable,
	}, nil
}

3283
api/did/v1/state.pulsar.go Normal file

File diff suppressed because it is too large Load Diff

3558
api/did/v1/tx.pulsar.go Normal file

File diff suppressed because it is too large Load Diff

193
api/did/v1/tx_grpc.pb.go Normal file
View File

@@ -0,0 +1,193 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.3.0
// - protoc (unknown)
// source: did/v1/tx.proto
package didv1
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7

// Fully-qualified method names for the did.v1.Msg service, used for
// invoking RPCs and in interceptor metadata.
const (
	Msg_UpdateParams_FullMethodName = "/did.v1.Msg/UpdateParams"
	Msg_InitializeController_FullMethodName = "/did.v1.Msg/InitializeController"
	Msg_AuthenticateController_FullMethodName = "/did.v1.Msg/AuthenticateController"
)

// MsgClient is the client API for Msg service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type MsgClient interface {
	// UpdateParams defines a governance operation for updating the parameters.
	//
	// Since: cosmos-sdk 0.47
	UpdateParams(ctx context.Context, in *MsgUpdateParams, opts ...grpc.CallOption) (*MsgUpdateParamsResponse, error)
	// InitializeController initializes a controller with the given assertions, keyshares, and verifications.
	InitializeController(ctx context.Context, in *MsgInitializeController, opts ...grpc.CallOption) (*MsgInitializeControllerResponse, error)
	// AuthenticateController asserts the given controller is the owner of the given address.
	AuthenticateController(ctx context.Context, in *MsgAuthenticateController, opts ...grpc.CallOption) (*MsgAuthenticateControllerResponse, error)
}

// msgClient implements MsgClient over a grpc.ClientConnInterface.
type msgClient struct {
	cc grpc.ClientConnInterface
}

// NewMsgClient creates a MsgClient bound to the given client connection.
func NewMsgClient(cc grpc.ClientConnInterface) MsgClient {
	return &msgClient{cc}
}

// UpdateParams invokes the UpdateParams RPC over the underlying connection.
func (c *msgClient) UpdateParams(ctx context.Context, in *MsgUpdateParams, opts ...grpc.CallOption) (*MsgUpdateParamsResponse, error) {
	out := new(MsgUpdateParamsResponse)
	err := c.cc.Invoke(ctx, Msg_UpdateParams_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// InitializeController invokes the InitializeController RPC over the underlying connection.
func (c *msgClient) InitializeController(ctx context.Context, in *MsgInitializeController, opts ...grpc.CallOption) (*MsgInitializeControllerResponse, error) {
	out := new(MsgInitializeControllerResponse)
	err := c.cc.Invoke(ctx, Msg_InitializeController_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// AuthenticateController invokes the AuthenticateController RPC over the underlying connection.
func (c *msgClient) AuthenticateController(ctx context.Context, in *MsgAuthenticateController, opts ...grpc.CallOption) (*MsgAuthenticateControllerResponse, error) {
	out := new(MsgAuthenticateControllerResponse)
	err := c.cc.Invoke(ctx, Msg_AuthenticateController_FullMethodName, in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// MsgServer is the server API for Msg service.
// All implementations must embed UnimplementedMsgServer
// for forward compatibility
type MsgServer interface {
	// UpdateParams defines a governance operation for updating the parameters.
	//
	// Since: cosmos-sdk 0.47
	UpdateParams(context.Context, *MsgUpdateParams) (*MsgUpdateParamsResponse, error)
	// InitializeController initializes a controller with the given assertions, keyshares, and verifications.
	InitializeController(context.Context, *MsgInitializeController) (*MsgInitializeControllerResponse, error)
	// AuthenticateController asserts the given controller is the owner of the given address.
	AuthenticateController(context.Context, *MsgAuthenticateController) (*MsgAuthenticateControllerResponse, error)
	mustEmbedUnimplementedMsgServer()
}

// UnimplementedMsgServer must be embedded to have forward compatible implementations.
type UnimplementedMsgServer struct {
}

func (UnimplementedMsgServer) UpdateParams(context.Context, *MsgUpdateParams) (*MsgUpdateParamsResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdateParams not implemented")
}
func (UnimplementedMsgServer) InitializeController(context.Context, *MsgInitializeController) (*MsgInitializeControllerResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method InitializeController not implemented")
}
func (UnimplementedMsgServer) AuthenticateController(context.Context, *MsgAuthenticateController) (*MsgAuthenticateControllerResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method AuthenticateController not implemented")
}
func (UnimplementedMsgServer) mustEmbedUnimplementedMsgServer() {}

// UnsafeMsgServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to MsgServer will
// result in compilation errors.
type UnsafeMsgServer interface {
	mustEmbedUnimplementedMsgServer()
}

// RegisterMsgServer registers the Msg service implementation with the gRPC registrar.
func RegisterMsgServer(s grpc.ServiceRegistrar, srv MsgServer) {
	s.RegisterService(&Msg_ServiceDesc, srv)
}

// _Msg_UpdateParams_Handler adapts the generic gRPC dispatch machinery to MsgServer.UpdateParams.
func _Msg_UpdateParams_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(MsgUpdateParams)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(MsgServer).UpdateParams(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: Msg_UpdateParams_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(MsgServer).UpdateParams(ctx, req.(*MsgUpdateParams))
	}
	return interceptor(ctx, in, info, handler)
}

// _Msg_InitializeController_Handler adapts the generic gRPC dispatch machinery to MsgServer.InitializeController.
func _Msg_InitializeController_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(MsgInitializeController)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(MsgServer).InitializeController(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: Msg_InitializeController_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(MsgServer).InitializeController(ctx, req.(*MsgInitializeController))
	}
	return interceptor(ctx, in, info, handler)
}

// _Msg_AuthenticateController_Handler adapts the generic gRPC dispatch machinery to MsgServer.AuthenticateController.
func _Msg_AuthenticateController_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(MsgAuthenticateController)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(MsgServer).AuthenticateController(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: Msg_AuthenticateController_FullMethodName,
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(MsgServer).AuthenticateController(ctx, req.(*MsgAuthenticateController))
	}
	return interceptor(ctx, in, info, handler)
}

// Msg_ServiceDesc is the grpc.ServiceDesc for Msg service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Msg_ServiceDesc = grpc.ServiceDesc{
	ServiceName: "did.v1.Msg",
	HandlerType: (*MsgServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "UpdateParams",
			Handler: _Msg_UpdateParams_Handler,
		},
		{
			MethodName: "InitializeController",
			Handler: _Msg_InitializeController_Handler,
		},
		{
			MethodName: "AuthenticateController",
			Handler: _Msg_AuthenticateController_Handler,
		},
	},
	Streams: []grpc.StreamDesc{},
	Metadata: "did/v1/tx.proto",
}

View File

@@ -0,0 +1,504 @@
// Code generated by protoc-gen-go-pulsar. DO NOT EDIT.
package modulev1
import (
_ "cosmossdk.io/api/cosmos/app/v1alpha1"
fmt "fmt"
runtime "github.com/cosmos/cosmos-proto/runtime"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoiface "google.golang.org/protobuf/runtime/protoiface"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
io "io"
reflect "reflect"
sync "sync"
)
var (
	// md_Module caches the protoreflect message descriptor for Module.
	md_Module protoreflect.MessageDescriptor
)

// init resolves the Module message descriptor after the file descriptor
// has been initialized.
func init() {
	file_oracle_module_v1_module_proto_init()
	md_Module = File_oracle_module_v1_module_proto.Messages().ByName("Module")
}
// Compile-time check that fastReflection_Module satisfies protoreflect.Message.
var _ protoreflect.Message = (*fastReflection_Module)(nil)

// fastReflection_Module is the fast-path protoreflect view of Module.
type fastReflection_Module Module

// ProtoReflect returns the fast-path reflection view of x.
func (x *Module) ProtoReflect() protoreflect.Message {
	return (*fastReflection_Module)(x)
}

// slowProtoReflect falls back to the standard protoimpl reflection machinery.
func (x *Module) slowProtoReflect() protoreflect.Message {
	mi := &file_oracle_module_v1_module_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

var _fastReflection_Module_messageType fastReflection_Module_messageType
var _ protoreflect.MessageType = fastReflection_Module_messageType{}

// fastReflection_Module_messageType implements protoreflect.MessageType for Module.
type fastReflection_Module_messageType struct{}

// Zero returns the read-only nil message.
func (x fastReflection_Module_messageType) Zero() protoreflect.Message {
	return (*fastReflection_Module)(nil)
}

// New returns a newly allocated empty message.
func (x fastReflection_Module_messageType) New() protoreflect.Message {
	return new(fastReflection_Module)
}

// Descriptor returns the message descriptor for Module.
func (x fastReflection_Module_messageType) Descriptor() protoreflect.MessageDescriptor {
	return md_Module
}

// Descriptor returns message descriptor, which contains only the protobuf
// type information for the message.
func (x *fastReflection_Module) Descriptor() protoreflect.MessageDescriptor {
	return md_Module
}

// Type returns the message type, which encapsulates both Go and protobuf
// type information. If the Go type information is not needed,
// it is recommended that the message descriptor be used instead.
func (x *fastReflection_Module) Type() protoreflect.MessageType {
	return _fastReflection_Module_messageType
}

// New returns a newly allocated and mutable empty message.
func (x *fastReflection_Module) New() protoreflect.Message {
	return new(fastReflection_Module)
}

// Interface unwraps the message reflection interface and
// returns the underlying ProtoMessage interface.
func (x *fastReflection_Module) Interface() protoreflect.ProtoMessage {
	return (*Module)(x)
}

// Range iterates over every populated field in an undefined order,
// calling f for each field descriptor and value encountered.
// Range returns immediately if f returns false.
// While iterating, mutating operations may only be performed
// on the current field descriptor.
// Module declares no fields, so there is nothing to iterate.
func (x *fastReflection_Module) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) {
}

// Has reports whether a field is populated.
//
// Some fields have the property of nullability where it is possible to
// distinguish between the default value of a field and whether the field
// was explicitly populated with the default value. Singular message fields,
// member fields of a oneof, and proto2 scalar fields are nullable. Such
// fields are populated only if explicitly set.
//
// In other cases (aside from the nullable cases above),
// a proto3 scalar field is populated if it contains a non-zero value, and
// a repeated field is populated if it is non-empty.
func (x *fastReflection_Module) Has(fd protoreflect.FieldDescriptor) bool {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.oracle.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.oracle.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// Clear clears the field such that a subsequent Has call reports false.
//
// Clearing an extension field clears both the extension type and value
// associated with the given field number.
//
// Clear is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_Module) Clear(fd protoreflect.FieldDescriptor) {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.oracle.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.oracle.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// Get retrieves the value for a field.
//
// For unpopulated scalars, it returns the default value, where
// the default value of a bytes scalar is guaranteed to be a copy.
// For unpopulated composite types, it returns an empty, read-only view
// of the value; to obtain a mutable reference, use Mutable.
func (x *fastReflection_Module) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {
	switch descriptor.FullName() {
	default:
		if descriptor.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.oracle.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.oracle.module.v1.Module does not contain field %s", descriptor.FullName()))
	}
}

// Set stores the value for a field.
//
// For a field belonging to a oneof, it implicitly clears any other field
// that may be currently set within the same oneof.
// For extension fields, it implicitly stores the provided ExtensionType.
// When setting a composite type, it is unspecified whether the stored value
// aliases the source's memory in any way. If the composite value is an
// empty, read-only value, then it panics.
//
// Set is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_Module) Set(fd protoreflect.FieldDescriptor, value protoreflect.Value) {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.oracle.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.oracle.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// Mutable returns a mutable reference to a composite type.
//
// If the field is unpopulated, it may allocate a composite value.
// For a field belonging to a oneof, it implicitly clears any other field
// that may be currently set within the same oneof.
// For extension fields, it implicitly stores the provided ExtensionType
// if not already stored.
// It panics if the field does not contain a composite type.
//
// Mutable is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_Module) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.oracle.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.oracle.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// NewField returns a new value that is assignable to the field
// for the given descriptor. For scalars, this returns the default value.
// For lists, maps, and messages, this returns a new, empty, mutable value.
func (x *fastReflection_Module) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value {
	switch fd.FullName() {
	default:
		if fd.IsExtension() {
			panic(fmt.Errorf("proto3 declared messages do not support extensions: didao.sonr.oracle.module.v1.Module"))
		}
		panic(fmt.Errorf("message didao.sonr.oracle.module.v1.Module does not contain field %s", fd.FullName()))
	}
}

// WhichOneof reports which field within the oneof is populated,
// returning nil if none are populated.
// It panics if the oneof descriptor does not belong to this message.
func (x *fastReflection_Module) WhichOneof(d protoreflect.OneofDescriptor) protoreflect.FieldDescriptor {
	switch d.FullName() {
	default:
		panic(fmt.Errorf("%s is not a oneof field in didao.sonr.oracle.module.v1.Module", d.FullName()))
	}
	panic("unreachable")
}

// GetUnknown retrieves the entire list of unknown fields.
// The caller may only mutate the contents of the RawFields
// if the mutated bytes are stored back into the message with SetUnknown.
func (x *fastReflection_Module) GetUnknown() protoreflect.RawFields {
	return x.unknownFields
}

// SetUnknown stores an entire list of unknown fields.
// The raw fields must be syntactically valid according to the wire format.
// An implementation may panic if this is not the case.
// Once stored, the caller must not mutate the content of the RawFields.
// An empty RawFields may be passed to clear the fields.
//
// SetUnknown is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_Module) SetUnknown(fields protoreflect.RawFields) {
	x.unknownFields = fields
}

// IsValid reports whether the message is valid.
//
// An invalid message is an empty, read-only value.
//
// An invalid message often corresponds to a nil pointer of the concrete
// message type, but the details are implementation dependent.
// Validity is not part of the protobuf data model, and may not
// be preserved in marshaling or other operations.
func (x *fastReflection_Module) IsValid() bool {
	return x != nil
}
// ProtoMethods returns optional fastReflectionFeature-path implementations of various operations.
// This method may return nil.
//
// The returned methods type is identical to
// "google.golang.org/protobuf/runtime/protoiface".Methods.
// Consult the protoiface package documentation for details.
func (x *fastReflection_Module) ProtoMethods() *protoiface.Methods {
	// size computes the wire size of the message; Module has no declared
	// fields, so only the preserved unknown fields contribute.
	size := func(input protoiface.SizeInput) protoiface.SizeOutput {
		x := input.Message.Interface().(*Module)
		if x == nil {
			return protoiface.SizeOutput{
				NoUnkeyedLiterals: input.NoUnkeyedLiterals,
				Size: 0,
			}
		}
		options := runtime.SizeInputToOptions(input)
		_ = options
		var n int
		var l int
		_ = l
		if x.unknownFields != nil {
			n += len(x.unknownFields)
		}
		return protoiface.SizeOutput{
			NoUnkeyedLiterals: input.NoUnkeyedLiterals,
			Size: n,
		}
	}

	// marshal writes the message to the wire format, appending to input.Buf
	// when one is supplied; unknown fields are written back verbatim.
	marshal := func(input protoiface.MarshalInput) (protoiface.MarshalOutput, error) {
		x := input.Message.Interface().(*Module)
		if x == nil {
			return protoiface.MarshalOutput{
				NoUnkeyedLiterals: input.NoUnkeyedLiterals,
				Buf: input.Buf,
			}, nil
		}
		options := runtime.MarshalInputToOptions(input)
		_ = options
		size := options.Size(x)
		dAtA := make([]byte, size)
		i := len(dAtA)
		_ = i
		var l int
		_ = l
		// Fields are written back-to-front; unknown fields go last on the wire.
		if x.unknownFields != nil {
			i -= len(x.unknownFields)
			copy(dAtA[i:], x.unknownFields)
		}
		if input.Buf != nil {
			input.Buf = append(input.Buf, dAtA...)
		} else {
			input.Buf = dAtA
		}
		return protoiface.MarshalOutput{
			NoUnkeyedLiterals: input.NoUnkeyedLiterals,
			Buf: input.Buf,
		}, nil
	}

	// unmarshal decodes input.Buf; every field is unknown to Module, so each
	// tag is skipped and (unless DiscardUnknown) retained in unknownFields.
	unmarshal := func(input protoiface.UnmarshalInput) (protoiface.UnmarshalOutput, error) {
		x := input.Message.Interface().(*Module)
		if x == nil {
			return protoiface.UnmarshalOutput{
				NoUnkeyedLiterals: input.NoUnkeyedLiterals,
				Flags: input.Flags,
			}, nil
		}
		options := runtime.UnmarshalInputToOptions(input)
		_ = options
		dAtA := input.Buf
		l := len(dAtA)
		iNdEx := 0
		for iNdEx < l {
			preIndex := iNdEx
			var wire uint64
			// Decode the varint tag (field number + wire type).
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, runtime.ErrIntOverflow
				}
				if iNdEx >= l {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				wire |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			fieldNum := int32(wire >> 3)
			wireType := int(wire & 0x7)
			if wireType == 4 {
				return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, fmt.Errorf("proto: Module: wiretype end group for non-group")
			}
			if fieldNum <= 0 {
				return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, fmt.Errorf("proto: Module: illegal tag %d (wire type %d)", fieldNum, wire)
			}
			switch fieldNum {
			default:
				iNdEx = preIndex
				skippy, err := runtime.Skip(dAtA[iNdEx:])
				if err != nil {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, err
				}
				if (skippy < 0) || (iNdEx+skippy) < 0 {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, runtime.ErrInvalidLength
				}
				if (iNdEx + skippy) > l {
					return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
				}
				if !options.DiscardUnknown {
					x.unknownFields = append(x.unknownFields, dAtA[iNdEx:iNdEx+skippy]...)
				}
				iNdEx += skippy
			}
		}
		if iNdEx > l {
			return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
		}
		return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, nil
	}
	return &protoiface.Methods{
		NoUnkeyedLiterals: struct{}{},
		Flags: protoiface.SupportMarshalDeterministic | protoiface.SupportUnmarshalDiscardUnknown,
		Size: size,
		Marshal: marshal,
		Unmarshal: unmarshal,
		Merge: nil,
		CheckInitialized: nil,
	}
}
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.0
// protoc (unknown)
// source: oracle/module/v1/module.proto
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Module is the app config object of the module.
// Learn more: https://docs.cosmos.network/main/building-modules/depinject
type Module struct {
	// Protobuf runtime bookkeeping below; managed by protoimpl — do not access directly.
	state protoimpl.MessageState
	sizeCache protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}
// Reset restores x to the zero-valued Module and, when the unsafe runtime is
// enabled, re-attaches the protobuf message info so reflection keeps working.
func (x *Module) Reset() {
	*x = Module{}
	if protoimpl.UnsafeEnabled {
		mi := &file_oracle_module_v1_module_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}
// String renders the message in the protobuf text format.
func (x *Module) String() string {
	return protoimpl.X.MessageStringOf(x)
}
func (*Module) ProtoMessage() {}
// Deprecated: Use Module.ProtoReflect.Descriptor instead.
func (*Module) Descriptor() ([]byte, []int) {
return file_oracle_module_v1_module_proto_rawDescGZIP(), []int{0}
}
var File_oracle_module_v1_module_proto protoreflect.FileDescriptor
var file_oracle_module_v1_module_proto_rawDesc = []byte{
0x0a, 0x1d, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2f,
0x76, 0x31, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12,
0x1b, 0x64, 0x69, 0x64, 0x61, 0x6f, 0x2e, 0x73, 0x6f, 0x6e, 0x72, 0x2e, 0x6f, 0x72, 0x61, 0x63,
0x6c, 0x65, 0x2e, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x1a, 0x20, 0x63, 0x6f,
0x73, 0x6d, 0x6f, 0x73, 0x2f, 0x61, 0x70, 0x70, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61,
0x31, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x28,
0x0a, 0x06, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x3a, 0x1e, 0xba, 0xc0, 0x96, 0xda, 0x01, 0x18,
0x0a, 0x16, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x69, 0x2d,
0x64, 0x61, 0x6f, 0x2f, 0x73, 0x6f, 0x6e, 0x72, 0x42, 0xf5, 0x01, 0x0a, 0x1f, 0x63, 0x6f, 0x6d,
0x2e, 0x64, 0x69, 0x64, 0x61, 0x6f, 0x2e, 0x73, 0x6f, 0x6e, 0x72, 0x2e, 0x6f, 0x72, 0x61, 0x63,
0x6c, 0x65, 0x2e, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x0b, 0x4d, 0x6f,
0x64, 0x75, 0x6c, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x34, 0x67, 0x69, 0x74,
0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x69, 0x2d, 0x64, 0x61, 0x6f, 0x2f, 0x73,
0x6f, 0x6e, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f, 0x6d,
0x6f, 0x64, 0x75, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x3b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x76,
0x31, 0xa2, 0x02, 0x04, 0x44, 0x53, 0x4f, 0x4d, 0xaa, 0x02, 0x1b, 0x44, 0x69, 0x64, 0x61, 0x6f,
0x2e, 0x53, 0x6f, 0x6e, 0x72, 0x2e, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2e, 0x4d, 0x6f, 0x64,
0x75, 0x6c, 0x65, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x1b, 0x44, 0x69, 0x64, 0x61, 0x6f, 0x5c, 0x53,
0x6f, 0x6e, 0x72, 0x5c, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x5c, 0x4d, 0x6f, 0x64, 0x75, 0x6c,
0x65, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x27, 0x44, 0x69, 0x64, 0x61, 0x6f, 0x5c, 0x53, 0x6f, 0x6e,
0x72, 0x5c, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x5c, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5c,
0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02,
0x1f, 0x44, 0x69, 0x64, 0x61, 0x6f, 0x3a, 0x3a, 0x53, 0x6f, 0x6e, 0x72, 0x3a, 0x3a, 0x4f, 0x72,
0x61, 0x63, 0x6c, 0x65, 0x3a, 0x3a, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x3a, 0x3a, 0x56, 0x31,
0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_oracle_module_v1_module_proto_rawDescOnce sync.Once
file_oracle_module_v1_module_proto_rawDescData = file_oracle_module_v1_module_proto_rawDesc
)
// file_oracle_module_v1_module_proto_rawDescGZIP gzip-compresses the raw file
// descriptor exactly once (guarded by sync.Once) and returns the cached bytes.
func file_oracle_module_v1_module_proto_rawDescGZIP() []byte {
	file_oracle_module_v1_module_proto_rawDescOnce.Do(func() {
		file_oracle_module_v1_module_proto_rawDescData = protoimpl.X.CompressGZIP(file_oracle_module_v1_module_proto_rawDescData)
	})
	return file_oracle_module_v1_module_proto_rawDescData
}
var file_oracle_module_v1_module_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_oracle_module_v1_module_proto_goTypes = []interface{}{
(*Module)(nil), // 0: didao.sonr.oracle.module.v1.Module
}
var file_oracle_module_v1_module_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_oracle_module_v1_module_proto_init() }
// file_oracle_module_v1_module_proto_init registers the file descriptor and
// message types with the protobuf type builder. Idempotent: the nil check on
// File_oracle_module_v1_module_proto makes repeat calls a no-op.
func file_oracle_module_v1_module_proto_init() {
	if File_oracle_module_v1_module_proto != nil {
		return
	}
	if !protoimpl.UnsafeEnabled {
		// Without the unsafe runtime, an Exporter lets protoimpl reach the
		// unexported bookkeeping fields of Module by index.
		file_oracle_module_v1_module_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Module); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_oracle_module_v1_module_proto_rawDesc,
			NumEnums: 0,
			NumMessages: 1,
			NumExtensions: 0,
			NumServices: 0,
		},
		GoTypes: file_oracle_module_v1_module_proto_goTypes,
		DependencyIndexes: file_oracle_module_v1_module_proto_depIdxs,
		MessageInfos: file_oracle_module_v1_module_proto_msgTypes,
	}.Build()
	File_oracle_module_v1_module_proto = out.File
	// Nil out the build-time tables so they can be garbage collected.
	file_oracle_module_v1_module_proto_rawDesc = nil
	file_oracle_module_v1_module_proto_goTypes = nil
	file_oracle_module_v1_module_proto_depIdxs = nil
}

View File

@ -0,0 +1,493 @@
// Code generated by protoc-gen-go-pulsar. DO NOT EDIT.
package oraclev1
import (
fmt "fmt"
runtime "github.com/cosmos/cosmos-proto/runtime"
_ "github.com/cosmos/gogoproto/gogoproto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoiface "google.golang.org/protobuf/runtime/protoiface"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
io "io"
reflect "reflect"
sync "sync"
)
var (
md_GenesisState protoreflect.MessageDescriptor
)
func init() {
file_oracle_v1_genesis_proto_init()
md_GenesisState = File_oracle_v1_genesis_proto.Messages().ByName("GenesisState")
}
var _ protoreflect.Message = (*fastReflection_GenesisState)(nil)
type fastReflection_GenesisState GenesisState
// ProtoReflect returns the fast-path reflection view of the message.
func (x *GenesisState) ProtoReflect() protoreflect.Message {
	return (*fastReflection_GenesisState)(x)
}
// slowProtoReflect falls back to the standard protoimpl reflection path,
// lazily binding the message info when the unsafe runtime is enabled.
func (x *GenesisState) slowProtoReflect() protoreflect.Message {
	mi := &file_oracle_v1_genesis_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}
var _fastReflection_GenesisState_messageType fastReflection_GenesisState_messageType
var _ protoreflect.MessageType = fastReflection_GenesisState_messageType{}
// fastReflection_GenesisState_messageType implements protoreflect.MessageType
// for GenesisState without allocating any per-call state.
type fastReflection_GenesisState_messageType struct{}
// Zero returns a read-only nil message of this type.
func (x fastReflection_GenesisState_messageType) Zero() protoreflect.Message {
	return (*fastReflection_GenesisState)(nil)
}
// New returns a newly allocated, mutable GenesisState message.
func (x fastReflection_GenesisState_messageType) New() protoreflect.Message {
	return new(fastReflection_GenesisState)
}
// Descriptor returns the protobuf descriptor for GenesisState.
func (x fastReflection_GenesisState_messageType) Descriptor() protoreflect.MessageDescriptor {
	return md_GenesisState
}
// Descriptor returns message descriptor, which contains only the protobuf
// type information for the message.
func (x *fastReflection_GenesisState) Descriptor() protoreflect.MessageDescriptor {
return md_GenesisState
}
// Type returns the message type, which encapsulates both Go and protobuf
// type information. If the Go type information is not needed,
// it is recommended that the message descriptor be used instead.
func (x *fastReflection_GenesisState) Type() protoreflect.MessageType {
return _fastReflection_GenesisState_messageType
}
// New returns a newly allocated and mutable empty message.
func (x *fastReflection_GenesisState) New() protoreflect.Message {
return new(fastReflection_GenesisState)
}
// Interface unwraps the message reflection interface and
// returns the underlying ProtoMessage interface.
func (x *fastReflection_GenesisState) Interface() protoreflect.ProtoMessage {
return (*GenesisState)(x)
}
// Range iterates over every populated field in an undefined order,
// calling f for each field descriptor and value encountered.
// Range returns immediately if f returns false.
// While iterating, mutating operations may only be performed
// on the current field descriptor.
func (x *fastReflection_GenesisState) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) {
}
// Has reports whether a field is populated.
//
// Some fields have the property of nullability where it is possible to
// distinguish between the default value of a field and whether the field
// was explicitly populated with the default value. Singular message fields,
// member fields of a oneof, and proto2 scalar fields are nullable. Such
// fields are populated only if explicitly set.
//
// In other cases (aside from the nullable cases above),
// a proto3 scalar field is populated if it contains a non-zero value, and
// a repeated field is populated if it is non-empty.
func (x *fastReflection_GenesisState) Has(fd protoreflect.FieldDescriptor) bool {
switch fd.FullName() {
default:
if fd.IsExtension() {
panic(fmt.Errorf("proto3 declared messages do not support extensions: oracle.v1.GenesisState"))
}
panic(fmt.Errorf("message oracle.v1.GenesisState does not contain field %s", fd.FullName()))
}
}
// Clear clears the field such that a subsequent Has call reports false.
//
// Clearing an extension field clears both the extension type and value
// associated with the given field number.
//
// Clear is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_GenesisState) Clear(fd protoreflect.FieldDescriptor) {
switch fd.FullName() {
default:
if fd.IsExtension() {
panic(fmt.Errorf("proto3 declared messages do not support extensions: oracle.v1.GenesisState"))
}
panic(fmt.Errorf("message oracle.v1.GenesisState does not contain field %s", fd.FullName()))
}
}
// Get retrieves the value for a field.
//
// For unpopulated scalars, it returns the default value, where
// the default value of a bytes scalar is guaranteed to be a copy.
// For unpopulated composite types, it returns an empty, read-only view
// of the value; to obtain a mutable reference, use Mutable.
func (x *fastReflection_GenesisState) Get(descriptor protoreflect.FieldDescriptor) protoreflect.Value {
switch descriptor.FullName() {
default:
if descriptor.IsExtension() {
panic(fmt.Errorf("proto3 declared messages do not support extensions: oracle.v1.GenesisState"))
}
panic(fmt.Errorf("message oracle.v1.GenesisState does not contain field %s", descriptor.FullName()))
}
}
// Set stores the value for a field.
//
// For a field belonging to a oneof, it implicitly clears any other field
// that may be currently set within the same oneof.
// For extension fields, it implicitly stores the provided ExtensionType.
// When setting a composite type, it is unspecified whether the stored value
// aliases the source's memory in any way. If the composite value is an
// empty, read-only value, then it panics.
//
// Set is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_GenesisState) Set(fd protoreflect.FieldDescriptor, value protoreflect.Value) {
switch fd.FullName() {
default:
if fd.IsExtension() {
panic(fmt.Errorf("proto3 declared messages do not support extensions: oracle.v1.GenesisState"))
}
panic(fmt.Errorf("message oracle.v1.GenesisState does not contain field %s", fd.FullName()))
}
}
// Mutable returns a mutable reference to a composite type.
//
// If the field is unpopulated, it may allocate a composite value.
// For a field belonging to a oneof, it implicitly clears any other field
// that may be currently set within the same oneof.
// For extension fields, it implicitly stores the provided ExtensionType
// if not already stored.
// It panics if the field does not contain a composite type.
//
// Mutable is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_GenesisState) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value {
switch fd.FullName() {
default:
if fd.IsExtension() {
panic(fmt.Errorf("proto3 declared messages do not support extensions: oracle.v1.GenesisState"))
}
panic(fmt.Errorf("message oracle.v1.GenesisState does not contain field %s", fd.FullName()))
}
}
// NewField returns a new value that is assignable to the field
// for the given descriptor. For scalars, this returns the default value.
// For lists, maps, and messages, this returns a new, empty, mutable value.
func (x *fastReflection_GenesisState) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value {
switch fd.FullName() {
default:
if fd.IsExtension() {
panic(fmt.Errorf("proto3 declared messages do not support extensions: oracle.v1.GenesisState"))
}
panic(fmt.Errorf("message oracle.v1.GenesisState does not contain field %s", fd.FullName()))
}
}
// WhichOneof reports which field within the oneof is populated,
// returning nil if none are populated.
// It panics if the oneof descriptor does not belong to this message.
func (x *fastReflection_GenesisState) WhichOneof(d protoreflect.OneofDescriptor) protoreflect.FieldDescriptor {
switch d.FullName() {
default:
panic(fmt.Errorf("%s is not a oneof field in oracle.v1.GenesisState", d.FullName()))
}
panic("unreachable")
}
// GetUnknown retrieves the entire list of unknown fields.
// The caller may only mutate the contents of the RawFields
// if the mutated bytes are stored back into the message with SetUnknown.
func (x *fastReflection_GenesisState) GetUnknown() protoreflect.RawFields {
return x.unknownFields
}
// SetUnknown stores an entire list of unknown fields.
// The raw fields must be syntactically valid according to the wire format.
// An implementation may panic if this is not the case.
// Once stored, the caller must not mutate the content of the RawFields.
// An empty RawFields may be passed to clear the fields.
//
// SetUnknown is a mutating operation and unsafe for concurrent use.
func (x *fastReflection_GenesisState) SetUnknown(fields protoreflect.RawFields) {
x.unknownFields = fields
}
// IsValid reports whether the message is valid.
//
// An invalid message is an empty, read-only value.
//
// An invalid message often corresponds to a nil pointer of the concrete
// message type, but the details are implementation dependent.
// Validity is not part of the protobuf data model, and may not
// be preserved in marshaling or other operations.
func (x *fastReflection_GenesisState) IsValid() bool {
return x != nil
}
// ProtoMethods returns optional fastReflectionFeature-path implementations of various operations.
// This method may return nil.
//
// The returned methods type is identical to
// "google.golang.org/protobuf/runtime/protoiface".Methods.
// Consult the protoiface package documentation for details.
func (x *fastReflection_GenesisState) ProtoMethods() *protoiface.Methods {
size := func(input protoiface.SizeInput) protoiface.SizeOutput {
x := input.Message.Interface().(*GenesisState)
if x == nil {
return protoiface.SizeOutput{
NoUnkeyedLiterals: input.NoUnkeyedLiterals,
Size: 0,
}
}
options := runtime.SizeInputToOptions(input)
_ = options
var n int
var l int
_ = l
if x.unknownFields != nil {
n += len(x.unknownFields)
}
return protoiface.SizeOutput{
NoUnkeyedLiterals: input.NoUnkeyedLiterals,
Size: n,
}
}
marshal := func(input protoiface.MarshalInput) (protoiface.MarshalOutput, error) {
x := input.Message.Interface().(*GenesisState)
if x == nil {
return protoiface.MarshalOutput{
NoUnkeyedLiterals: input.NoUnkeyedLiterals,
Buf: input.Buf,
}, nil
}
options := runtime.MarshalInputToOptions(input)
_ = options
size := options.Size(x)
dAtA := make([]byte, size)
i := len(dAtA)
_ = i
var l int
_ = l
if x.unknownFields != nil {
i -= len(x.unknownFields)
copy(dAtA[i:], x.unknownFields)
}
if input.Buf != nil {
input.Buf = append(input.Buf, dAtA...)
} else {
input.Buf = dAtA
}
return protoiface.MarshalOutput{
NoUnkeyedLiterals: input.NoUnkeyedLiterals,
Buf: input.Buf,
}, nil
}
unmarshal := func(input protoiface.UnmarshalInput) (protoiface.UnmarshalOutput, error) {
x := input.Message.Interface().(*GenesisState)
if x == nil {
return protoiface.UnmarshalOutput{
NoUnkeyedLiterals: input.NoUnkeyedLiterals,
Flags: input.Flags,
}, nil
}
options := runtime.UnmarshalInputToOptions(input)
_ = options
dAtA := input.Buf
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, runtime.ErrIntOverflow
}
if iNdEx >= l {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, fmt.Errorf("proto: GenesisState: wiretype end group for non-group")
}
if fieldNum <= 0 {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, fmt.Errorf("proto: GenesisState: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
default:
iNdEx = preIndex
skippy, err := runtime.Skip(dAtA[iNdEx:])
if err != nil {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, err
}
if (skippy < 0) || (iNdEx+skippy) < 0 {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, runtime.ErrInvalidLength
}
if (iNdEx + skippy) > l {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
}
if !options.DiscardUnknown {
x.unknownFields = append(x.unknownFields, dAtA[iNdEx:iNdEx+skippy]...)
}
iNdEx += skippy
}
}
if iNdEx > l {
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, io.ErrUnexpectedEOF
}
return protoiface.UnmarshalOutput{NoUnkeyedLiterals: input.NoUnkeyedLiterals, Flags: input.Flags}, nil
}
return &protoiface.Methods{
NoUnkeyedLiterals: struct{}{},
Flags: protoiface.SupportMarshalDeterministic | protoiface.SupportUnmarshalDiscardUnknown,
Size: size,
Marshal: marshal,
Unmarshal: unmarshal,
Merge: nil,
CheckInitialized: nil,
}
}
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.0
// protoc (unknown)
// source: oracle/v1/genesis.proto
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// GenesisState defines the middlewares genesis state.
type GenesisState struct {
	// Protobuf runtime bookkeeping below; managed by protoimpl — do not access directly.
	state protoimpl.MessageState
	sizeCache protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields
}
// Reset restores x to the zero-valued GenesisState and, when the unsafe
// runtime is enabled, re-attaches the protobuf message info.
func (x *GenesisState) Reset() {
	*x = GenesisState{}
	if protoimpl.UnsafeEnabled {
		mi := &file_oracle_v1_genesis_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}
// String renders the message in the protobuf text format.
func (x *GenesisState) String() string {
	return protoimpl.X.MessageStringOf(x)
}
// ProtoMessage marks *GenesisState as a protobuf message (tag-interface method).
func (*GenesisState) ProtoMessage() {}
// Deprecated: Use GenesisState.ProtoReflect.Descriptor instead.
func (*GenesisState) Descriptor() ([]byte, []int) {
return file_oracle_v1_genesis_proto_rawDescGZIP(), []int{0}
}
var File_oracle_v1_genesis_proto protoreflect.FileDescriptor
var file_oracle_v1_genesis_proto_rawDesc = []byte{
0x0a, 0x17, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x67, 0x65, 0x6e, 0x65,
0x73, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x6f, 0x72, 0x61, 0x63, 0x6c,
0x65, 0x2e, 0x76, 0x31, 0x1a, 0x14, 0x67, 0x6f, 0x67, 0x6f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f,
0x67, 0x6f, 0x67, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x0e, 0x0a, 0x0c, 0x47, 0x65,
0x6e, 0x65, 0x73, 0x69, 0x73, 0x53, 0x74, 0x61, 0x74, 0x65, 0x42, 0x91, 0x01, 0x0a, 0x0d, 0x63,
0x6f, 0x6d, 0x2e, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x0c, 0x47, 0x65,
0x6e, 0x65, 0x73, 0x69, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x69, 0x2d, 0x64, 0x61, 0x6f, 0x2f,
0x73, 0x6f, 0x6e, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f,
0x76, 0x31, 0x3b, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x4f, 0x58,
0x58, 0xaa, 0x02, 0x09, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x09,
0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x15, 0x4f, 0x72, 0x61, 0x63,
0x6c, 0x65, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0xea, 0x02, 0x0a, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_oracle_v1_genesis_proto_rawDescOnce sync.Once
file_oracle_v1_genesis_proto_rawDescData = file_oracle_v1_genesis_proto_rawDesc
)
// file_oracle_v1_genesis_proto_rawDescGZIP gzip-compresses the raw file
// descriptor exactly once (guarded by sync.Once) and returns the cached bytes.
func file_oracle_v1_genesis_proto_rawDescGZIP() []byte {
	file_oracle_v1_genesis_proto_rawDescOnce.Do(func() {
		file_oracle_v1_genesis_proto_rawDescData = protoimpl.X.CompressGZIP(file_oracle_v1_genesis_proto_rawDescData)
	})
	return file_oracle_v1_genesis_proto_rawDescData
}
var file_oracle_v1_genesis_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_oracle_v1_genesis_proto_goTypes = []interface{}{
(*GenesisState)(nil), // 0: oracle.v1.GenesisState
}
var file_oracle_v1_genesis_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_oracle_v1_genesis_proto_init() }
// file_oracle_v1_genesis_proto_init registers the file descriptor and message
// types with the protobuf type builder. Idempotent: the nil check on
// File_oracle_v1_genesis_proto makes repeat calls a no-op.
func file_oracle_v1_genesis_proto_init() {
	if File_oracle_v1_genesis_proto != nil {
		return
	}
	if !protoimpl.UnsafeEnabled {
		// Without the unsafe runtime, an Exporter lets protoimpl reach the
		// unexported bookkeeping fields of GenesisState by index.
		file_oracle_v1_genesis_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GenesisState); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_oracle_v1_genesis_proto_rawDesc,
			NumEnums: 0,
			NumMessages: 1,
			NumExtensions: 0,
			NumServices: 0,
		},
		GoTypes: file_oracle_v1_genesis_proto_goTypes,
		DependencyIndexes: file_oracle_v1_genesis_proto_depIdxs,
		MessageInfos: file_oracle_v1_genesis_proto_msgTypes,
	}.Build()
	File_oracle_v1_genesis_proto = out.File
	// Nil out the build-time tables so they can be garbage collected.
	file_oracle_v1_genesis_proto_rawDesc = nil
	file_oracle_v1_genesis_proto_goTypes = nil
	file_oracle_v1_genesis_proto_depIdxs = nil
}

View File

@ -0,0 +1,74 @@
// Code generated by protoc-gen-go-pulsar. DO NOT EDIT.
package oraclev1
import (
_ "github.com/cosmos/gogoproto/gogoproto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
)
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.0
// protoc (unknown)
// source: oracle/v1/query.proto
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
var File_oracle_v1_query_proto protoreflect.FileDescriptor
var file_oracle_v1_query_proto_rawDesc = []byte{
0x0a, 0x15, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x71, 0x75, 0x65, 0x72,
0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2e,
0x76, 0x31, 0x1a, 0x14, 0x67, 0x6f, 0x67, 0x6f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f,
0x67, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x8f, 0x01, 0x0a, 0x0d, 0x63, 0x6f, 0x6d,
0x2e, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x0a, 0x51, 0x75, 0x65, 0x72,
0x79, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x69, 0x2d, 0x64, 0x61, 0x6f, 0x2f, 0x73, 0x6f, 0x6e, 0x72,
0x2f, 0x61, 0x70, 0x69, 0x2f, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x3b, 0x6f,
0x72, 0x61, 0x63, 0x6c, 0x65, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x4f, 0x58, 0x58, 0xaa, 0x02, 0x09,
0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x09, 0x4f, 0x72, 0x61, 0x63,
0x6c, 0x65, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x15, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x5c, 0x56,
0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0a,
0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
}
var file_oracle_v1_query_proto_goTypes = []interface{}{}
var file_oracle_v1_query_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_oracle_v1_query_proto_init() }
// file_oracle_v1_query_proto_init registers the (message-less) query file
// descriptor with the protobuf type builder. Idempotent via the nil check.
func file_oracle_v1_query_proto_init() {
	if File_oracle_v1_query_proto != nil {
		return
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_oracle_v1_query_proto_rawDesc,
			NumEnums: 0,
			NumMessages: 0,
			NumExtensions: 0,
			NumServices: 0,
		},
		GoTypes: file_oracle_v1_query_proto_goTypes,
		DependencyIndexes: file_oracle_v1_query_proto_depIdxs,
	}.Build()
	File_oracle_v1_query_proto = out.File
	// Nil out the build-time tables so they can be garbage collected.
	file_oracle_v1_query_proto_rawDesc = nil
	file_oracle_v1_query_proto_goTypes = nil
	file_oracle_v1_query_proto_depIdxs = nil
}

View File

@ -0,0 +1,73 @@
// Code generated by protoc-gen-go-pulsar. DO NOT EDIT.
package oraclev1
import (
_ "github.com/cosmos/gogoproto/gogoproto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
)
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.0
// protoc (unknown)
// source: oracle/v1/tx.proto
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
var File_oracle_v1_tx_proto protoreflect.FileDescriptor
var file_oracle_v1_tx_proto_rawDesc = []byte{
0x0a, 0x12, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x2f, 0x74, 0x78, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x1a,
0x14, 0x67, 0x6f, 0x67, 0x6f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x67, 0x6f, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x8c, 0x01, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x2e, 0x6f, 0x72,
0x61, 0x63, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x07, 0x54, 0x78, 0x50, 0x72, 0x6f, 0x74, 0x6f,
0x50, 0x01, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64,
0x69, 0x2d, 0x64, 0x61, 0x6f, 0x2f, 0x73, 0x6f, 0x6e, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x6f,
0x72, 0x61, 0x63, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0x3b, 0x6f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x76,
0x31, 0xa2, 0x02, 0x03, 0x4f, 0x58, 0x58, 0xaa, 0x02, 0x09, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65,
0x2e, 0x56, 0x31, 0xca, 0x02, 0x09, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x5c, 0x56, 0x31, 0xe2,
0x02, 0x15, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0a, 0x4f, 0x72, 0x61, 0x63, 0x6c, 0x65,
0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var file_oracle_v1_tx_proto_goTypes = []interface{}{}
var file_oracle_v1_tx_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_oracle_v1_tx_proto_init() }
// file_oracle_v1_tx_proto_init registers the (message-less) tx file
// descriptor with the protobuf type builder. Idempotent via the nil check.
func file_oracle_v1_tx_proto_init() {
	if File_oracle_v1_tx_proto != nil {
		return
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_oracle_v1_tx_proto_rawDesc,
			NumEnums: 0,
			NumMessages: 0,
			NumExtensions: 0,
			NumServices: 0,
		},
		GoTypes: file_oracle_v1_tx_proto_goTypes,
		DependencyIndexes: file_oracle_v1_tx_proto_depIdxs,
	}.Build()
	File_oracle_v1_tx_proto = out.File
	// Nil out the build-time tables so they can be garbage collected.
	file_oracle_v1_tx_proto_rawDesc = nil
	file_oracle_v1_tx_proto_goTypes = nil
	file_oracle_v1_tx_proto_depIdxs = nil
}

74
app/ante.go Normal file
View File

@ -0,0 +1,74 @@
package app
import (
"errors"
ibcante "github.com/cosmos/ibc-go/v8/modules/core/ante"
"github.com/cosmos/ibc-go/v8/modules/core/keeper"
circuitante "cosmossdk.io/x/circuit/ante"
circuitkeeper "cosmossdk.io/x/circuit/keeper"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/x/auth/ante"
stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper"
sdkmath "cosmossdk.io/math"
poaante "github.com/strangelove-ventures/poa/ante"
globalfeeante "github.com/strangelove-ventures/globalfee/x/globalfee/ante"
globalfeekeeper "github.com/strangelove-ventures/globalfee/x/globalfee/keeper"
)
// HandlerOptions extend the SDK's AnteHandler options by requiring the IBC
// channel keeper.
type HandlerOptions struct {
	ante.HandlerOptions
	// IBCKeeper is consumed by the redundant-relay decorator; must be non-nil.
	IBCKeeper *keeper.Keeper
	// CircuitKeeper backs the circuit-breaker decorator; must be non-nil.
	CircuitKeeper *circuitkeeper.Keeper
	// StakingKeeper is passed to the global-fee decorator.
	StakingKeeper *stakingkeeper.Keeper
	// GlobalFeeKeeper supplies chain-wide minimum fee parameters (value type,
	// so it cannot be nil-checked).
	GlobalFeeKeeper globalfeekeeper.Keeper
	// BypassMinFeeMsgTypes lists message type URLs exempt from the global
	// minimum-fee check.
	BypassMinFeeMsgTypes []string
}
// NewAnteHandler builds the chain's AnteHandler: the standard SDK decorator
// set plus circuit-breaker, global-fee, IBC redundant-relay, and POA
// restrictions, executed in the order listed below.
//
// It returns an error if any required keeper is missing so misconfiguration
// surfaces at startup instead of panicking on the first transaction.
func NewAnteHandler(options HandlerOptions) (sdk.AnteHandler, error) {
	if options.AccountKeeper == nil {
		return nil, errors.New("account keeper is required for ante builder")
	}
	if options.BankKeeper == nil {
		return nil, errors.New("bank keeper is required for ante builder")
	}
	if options.SignModeHandler == nil {
		return nil, errors.New("sign mode handler is required for ante builder")
	}
	if options.CircuitKeeper == nil {
		return nil, errors.New("circuit keeper is required for ante builder")
	}
	// These two were previously unchecked even though the decorators below
	// dereference them: a nil keeper would panic at tx time.
	if options.IBCKeeper == nil {
		return nil, errors.New("IBC keeper is required for ante builder")
	}
	if options.StakingKeeper == nil {
		return nil, errors.New("staking keeper is required for ante builder")
	}
	// POA commission limits: gentx-time rate validation is disabled; live
	// validator commission must stay within [0.10, 0.50].
	poaDoGenTxRateValidation := false
	poaRateFloor := sdkmath.LegacyMustNewDecFromStr("0.10")
	poaRateCeil := sdkmath.LegacyMustNewDecFromStr("0.50")
	anteDecorators := []sdk.AnteDecorator{
		ante.NewSetUpContextDecorator(), // outermost AnteDecorator. SetUpContext must be called first
		circuitante.NewCircuitBreakerDecorator(options.CircuitKeeper),
		ante.NewExtensionOptionsDecorator(options.ExtensionOptionChecker),
		ante.NewValidateBasicDecorator(),
		ante.NewTxTimeoutHeightDecorator(),
		ante.NewValidateMemoDecorator(options.AccountKeeper),
		ante.NewConsumeGasForTxSizeDecorator(options.AccountKeeper),
		globalfeeante.NewFeeDecorator(options.BypassMinFeeMsgTypes, options.GlobalFeeKeeper, options.StakingKeeper, 2_000_000),
		// ante.NewDeductFeeDecorator(options.AccountKeeper, options.BankKeeper, options.FeegrantKeeper, options.TxFeeChecker),
		ante.NewSetPubKeyDecorator(options.AccountKeeper), // SetPubKeyDecorator must be called before all signature verification decorators
		ante.NewValidateSigCountDecorator(options.AccountKeeper),
		ante.NewSigGasConsumeDecorator(options.AccountKeeper, options.SigGasConsumer),
		ante.NewSigVerificationDecorator(options.AccountKeeper, options.SignModeHandler),
		ante.NewIncrementSequenceDecorator(options.AccountKeeper),
		ibcante.NewRedundantRelayDecorator(options.IBCKeeper),
		poaante.NewPOADisableStakingDecorator(),
		poaante.NewCommissionLimitDecorator(poaDoGenTxRateValidation, poaRateFloor, poaRateCeil),
	}
	return sdk.ChainAnteDecorators(anteDecorators...), nil
}

1456
app/app.go Normal file

File diff suppressed because it is too large Load Diff

62
app/app_test.go Normal file
View File

@ -0,0 +1,62 @@
package app
import (
"testing"
abci "github.com/cometbft/cometbft/abci/types"
dbm "github.com/cosmos/cosmos-db"
"github.com/stretchr/testify/require"
"cosmossdk.io/log"
simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
sdk "github.com/cosmos/cosmos-sdk/types"
)
// TestAppExport verifies that a freshly re-opened app (on which InitChain was
// never called) can still export genesis state and validators from an
// existing database.
func TestAppExport(t *testing.T) {
	memDB := dbm.NewMemDB()
	baseLogger := log.NewTestLogger(t)

	opts := SetupOptions{
		Logger:  baseLogger.With("instance", "first"),
		DB:      memDB,
		AppOpts: simtestutil.NewAppOptionsWithFlagHome(t.TempDir()),
	}
	firstApp := NewChainAppWithCustomOptions(t, false, opts)

	// Finalize and commit one block so CheckTx state exists before export.
	_, err := firstApp.FinalizeBlock(&abci.RequestFinalizeBlock{Height: 1})
	require.NoError(t, err)
	_, err = firstApp.Commit()
	require.NoError(t, err)

	// Re-open the same DB with a brand-new app object, so InitChain has not
	// been called on this instance.
	reopened := NewChainApp(
		baseLogger, memDB, nil, true, simtestutil.NewAppOptionsWithFlagHome(t.TempDir()),
	)

	_, err = reopened.ExportAppStateAndValidators(false, []string{}, nil)
	require.NoError(t, err, "ExportAppStateAndValidators should not have an error")
}
// TestBlockedAddrs checks that every address reported by BlockedAddresses is
// actually blocked in the bank keeper.
func TestBlockedAddrs(t *testing.T) {
	chainApp := Setup(t)

	for name := range BlockedAddresses() {
		t.Run(name, func(t *testing.T) {
			blockedAddr, err := sdk.AccAddressFromBech32(name)
			if err != nil {
				// Not a bech32 string: resolve it as a module account name.
				blockedAddr = chainApp.AccountKeeper.GetModuleAddress(name)
			}

			require.True(t, chainApp.BankKeeper.BlockedAddr(blockedAddr), "ensure that blocked addresses are properly set in bank keeper")
		})
	}
}
// TestGetMaccPerms verifies that GetMaccPerms returns a map equal to the
// package-level module account permission table.
func TestGetMaccPerms(t *testing.T) {
	perms := GetMaccPerms()
	require.Equal(t, maccPerms, perms, "duplicated module account permissions differed from actual module account permissions")
}

View File

@ -0,0 +1,59 @@
package decorators
import (
"fmt"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/x/authz"
"github.com/cosmos/gogoproto/proto"
)
// MsgFilterDecorator is an ante.go decorator template for filtering messages.
type MsgFilterDecorator struct {
	// blockedTypes holds prototype messages; any tx containing a message
	// whose proto name matches one of these is rejected (see AnteHandle).
	blockedTypes []sdk.Msg
}
// FilterDecorator returns a new MsgFilterDecorator. The resulting decorator
// errors on any transaction containing one of the blocked message types.
//
// Example:
//   - decorators.FilterDecorator(&banktypes.MsgSend{})
//
// This would block any MsgSend messages from being included in a transaction
// if set in ante.go.
func FilterDecorator(blockedMsgTypes ...sdk.Msg) MsgFilterDecorator {
	d := MsgFilterDecorator{}
	d.blockedTypes = blockedMsgTypes
	return d
}
// AnteHandle rejects the transaction when any of its messages (including
// messages nested inside authz MsgExec) is of a blocked type; otherwise it
// passes the transaction to the next handler.
func (mfd MsgFilterDecorator) AnteHandle(ctx sdk.Context, tx sdk.Tx, simulate bool, next sdk.AnteHandler) (newCtx sdk.Context, err error) {
	if !mfd.HasDisallowedMessage(ctx, tx.GetMsgs()) {
		return next(ctx, tx, simulate)
	}
	return ctx, fmt.Errorf("tx contains unsupported message types at height %d", ctx.BlockHeight())
}
// HasDisallowedMessage reports whether msgs — searched recursively through
// authz MsgExec wrappers — contains any blocked message type. It also
// returns true when a nested authz payload cannot be decoded.
func (mfd MsgFilterDecorator) HasDisallowedMessage(ctx sdk.Context, msgs []sdk.Msg) bool {
	for _, m := range msgs {
		// Recurse into authz exec wrappers so nested messages are checked too.
		if wrapped, ok := m.(*authz.MsgExec); ok {
			inner, err := wrapped.GetMessages()
			if err != nil || mfd.HasDisallowedMessage(ctx, inner) {
				return true
			}
		}

		name := proto.MessageName(m)
		for _, blocked := range mfd.blockedTypes {
			if name == proto.MessageName(blocked) {
				return true
			}
		}
	}

	return false
}

View File

@ -0,0 +1,55 @@
package decorators_test
import (
"testing"
sdkmath "cosmossdk.io/math"
"github.com/cometbft/cometbft/crypto/secp256k1"
sdk "github.com/cosmos/cosmos-sdk/types"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
"github.com/stretchr/testify/suite"
app "github.com/onsonr/hway/app"
"github.com/onsonr/hway/app/decorators"
)
// AnteTestSuite provides a fresh app and context for ante decorator tests.
type AnteTestSuite struct {
	suite.Suite

	ctx sdk.Context  // context created in SetupTest
	app *app.SonrApp // application under test
}
// SetupTest builds a fresh app and context before each test in the suite.
func (s *AnteTestSuite) SetupTest() {
	s.app = app.Setup(s.T())
	s.ctx = s.app.BaseApp.NewContext(false) // false: not a CheckTx context
}
func TestAnteTestSuite(t *testing.T) {
suite.Run(t, new(AnteTestSuite))
}
// TestAnteMsgFilterLogic checks that the filter decorator blocks exactly the
// configured message type (MsgSend) while letting other bank messages
// (MsgMultiSend) pass.
func (s *AnteTestSuite) TestAnteMsgFilterLogic() {
	addr := sdk.AccAddress(secp256k1.GenPrivKey().PubKey().Address())
	coins := sdk.NewCoins(sdk.NewCoin("stake", sdkmath.NewInt(1)))

	// Block any MsgSend messages.
	filter := decorators.FilterDecorator(&banktypes.MsgSend{})

	sendMsg := banktypes.NewMsgSend(addr, addr, coins)
	_, err := filter.AnteHandle(s.ctx, decorators.NewMockTx(sendMsg), false, decorators.EmptyAnte)
	s.Require().Error(err)

	// Other messages (such as MsgMultiSend) still go through.
	multiSend := banktypes.NewMsgMultiSend(
		banktypes.NewInput(addr, coins),
		[]banktypes.Output{banktypes.NewOutput(addr, coins)},
	)
	_, err = filter.AnteHandle(s.ctx, decorators.NewMockTx(multiSend), false, decorators.EmptyAnte)
	s.Require().NoError(err)
}

35
app/decorators/setup.go Normal file
View File

@ -0,0 +1,35 @@
package decorators
import (
sdk "github.com/cosmos/cosmos-sdk/types"
protov2 "google.golang.org/protobuf/proto"
)
// EmptyAnte is a no-op ante handler for decorator unit tests: it returns the
// context unchanged and no error, serving as the terminal `next` handler.
var (
	EmptyAnte = func(ctx sdk.Context, tx sdk.Tx, simulate bool) (sdk.Context, error) {
		return ctx, nil
	}
)
// MockTx is a minimal transaction carrying only messages, used to exercise
// ante decorators in unit tests.
type MockTx struct {
	// msgs are the messages returned by GetMsgs.
	msgs []sdk.Msg
}
// NewMockTx wraps the given messages in a MockTx.
func NewMockTx(msgs ...sdk.Msg) MockTx {
	tx := MockTx{}
	tx.msgs = msgs
	return tx
}
// GetMsgs returns the wrapped messages.
func (tx MockTx) GetMsgs() []sdk.Msg {
	return tx.msgs
}
// GetMsgsV2 returns no protov2 messages; the mock does not support them.
func (tx MockTx) GetMsgsV2() ([]protov2.Message, error) {
	return nil, nil
}
// ValidateBasic always succeeds; the mock performs no validation.
func (tx MockTx) ValidateBasic() error {
	return nil
}

38
app/encoding.go Normal file
View File

@ -0,0 +1,38 @@
package app
import (
"testing"
dbm "github.com/cosmos/cosmos-db"
"cosmossdk.io/log"
simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
"github.com/onsonr/hway/app/params"
)
// MakeEncodingConfig creates a new EncodingConfig with all modules
// registered. For testing only.
func MakeEncodingConfig(t testing.TB) params.EncodingConfig {
	t.Helper()

	// "Pre"-instantiate the application to obtain its injected/configured
	// encoding configuration; with app wiring, depinject could be used
	// directly instead (see root_v2.go).
	throwaway := NewChainApp(
		log.NewNopLogger(),
		dbm.NewMemDB(),
		nil,
		true,
		simtestutil.NewAppOptionsWithFlagHome(t.TempDir()),
	)

	return makeEncodingConfig(throwaway)
}
// makeEncodingConfig extracts the encoding configuration from an already
// constructed application.
func makeEncodingConfig(tempApp *SonrApp) params.EncodingConfig {
	return params.EncodingConfig{
		InterfaceRegistry: tempApp.InterfaceRegistry(),
		Codec:             tempApp.AppCodec(),
		TxConfig:          tempApp.TxConfig(),
		Amino:             tempApp.LegacyAmino(),
	}
}

253
app/export.go Normal file
View File

@ -0,0 +1,253 @@
package app
import (
"encoding/json"
"fmt"
"log"
cmtproto "github.com/cometbft/cometbft/proto/tendermint/types"
storetypes "cosmossdk.io/store/types"
servertypes "github.com/cosmos/cosmos-sdk/server/types"
sdk "github.com/cosmos/cosmos-sdk/types"
slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types"
"github.com/cosmos/cosmos-sdk/x/staking"
stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
)
// ExportAppStateAndValidators exports the state of the application for a
// genesis file.
//
// When forZeroHeight is true, the state is first mutated by
// prepForZeroHeightGenesis so the export can be replayed from height 0;
// jailAllowedAddrs then lists validators exempt from jailing.
func (app *SonrApp) ExportAppStateAndValidators(forZeroHeight bool, jailAllowedAddrs, modulesToExport []string) (servertypes.ExportedApp, error) {
	// Query context at the last committed height; rewards are read as if
	// they could be withdrawn from the start of the next block.
	ctx := app.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()})

	// We export at last height + 1, because that's the height at which
	// CometBFT will start InitChain.
	height := app.LastBlockHeight() + 1
	if forZeroHeight {
		height = 0
		app.prepForZeroHeightGenesis(ctx, jailAllowedAddrs)
	}

	genState, err := app.ModuleManager.ExportGenesisForModules(ctx, app.appCodec, modulesToExport)
	if err != nil {
		return servertypes.ExportedApp{}, err
	}

	appState, err := json.MarshalIndent(genState, "", " ")
	if err != nil {
		return servertypes.ExportedApp{}, err
	}

	// Handle the error explicitly instead of returning a partially
	// populated ExportedApp alongside a non-nil error (the original
	// returned both, which callers could misread).
	validators, err := staking.WriteValidators(ctx, app.StakingKeeper)
	if err != nil {
		return servertypes.ExportedApp{}, err
	}

	return servertypes.ExportedApp{
		AppState:        appState,
		Validators:      validators,
		Height:          height,
		ConsensusParams: app.BaseApp.GetConsensusParams(ctx),
	}, nil
}
// prepForZeroHeightGenesis prepares state for a fresh start at zero height.
// NOTE zero height genesis is a temporary feature which will be deprecated
// in favor of export at a block height.
//
// It withdraws all commission and delegator rewards, resets distribution and
// slashing bookkeeping, zeroes creation/bond heights, and — when
// jailAllowedAddrs is non-empty — jails every validator not on that list.
// The statement order below matters; do not reorder.
func (app *SonrApp) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs []string) {
	applyAllowedAddrs := false

	// check if there is a allowed address list
	if len(jailAllowedAddrs) > 0 {
		applyAllowedAddrs = true
	}

	allowedAddrsMap := make(map[string]bool)

	for _, addr := range jailAllowedAddrs {
		// Validate each bech32 operator address; a malformed entry aborts
		// the whole export via log.Fatal.
		_, err := sdk.ValAddressFromBech32(addr)
		if err != nil {
			log.Fatal(err)
		}
		allowedAddrsMap[addr] = true
	}

	// Just to be safe, assert the invariants on current state.
	app.CrisisKeeper.AssertInvariants(ctx)

	// Handle fee distribution state.

	// withdraw all validator commission
	err := app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) {
		valBz, err := app.StakingKeeper.ValidatorAddressCodec().StringToBytes(val.GetOperator())
		if err != nil {
			panic(err)
		}
		// Error deliberately ignored: withdrawal is a no-op for validators
		// with zero outstanding commission.
		_, _ = app.DistrKeeper.WithdrawValidatorCommission(ctx, valBz)
		return false
	})
	if err != nil {
		panic(err)
	}

	// withdraw all delegator rewards
	dels, err := app.StakingKeeper.GetAllDelegations(ctx)
	if err != nil {
		panic(err)
	}

	for _, delegation := range dels {
		valAddr, err := sdk.ValAddressFromBech32(delegation.ValidatorAddress)
		if err != nil {
			panic(err)
		}

		delAddr := sdk.MustAccAddressFromBech32(delegation.DelegatorAddress)

		if _, err = app.DistrKeeper.WithdrawDelegationRewards(ctx, delAddr, valAddr); err != nil {
			panic(err)
		}
	}

	// clear validator slash events
	app.DistrKeeper.DeleteAllValidatorSlashEvents(ctx)

	// clear validator historical rewards
	app.DistrKeeper.DeleteAllValidatorHistoricalRewards(ctx)

	// set context height to zero (restored after reinitializing delegations)
	height := ctx.BlockHeight()
	ctx = ctx.WithBlockHeight(0)

	// reinitialize all validators
	err = app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) {
		valBz, err := app.StakingKeeper.ValidatorAddressCodec().StringToBytes(val.GetOperator())
		if err != nil {
			panic(err)
		}
		// donate any unwithdrawn outstanding reward fraction tokens to the community pool
		scraps, err := app.DistrKeeper.GetValidatorOutstandingRewardsCoins(ctx, valBz)
		if err != nil {
			panic(err)
		}
		feePool, err := app.DistrKeeper.FeePool.Get(ctx)
		if err != nil {
			panic(err)
		}
		feePool.CommunityPool = feePool.CommunityPool.Add(scraps...)
		if err := app.DistrKeeper.FeePool.Set(ctx, feePool); err != nil {
			panic(err)
		}

		// Re-run the creation hook to rebuild distribution records at height 0.
		if err := app.DistrKeeper.Hooks().AfterValidatorCreated(ctx, valBz); err != nil {
			panic(err)
		}
		return false
	})
	if err != nil {
		panic(err)
	}

	// reinitialize all delegations
	for _, del := range dels {
		valAddr, err := sdk.ValAddressFromBech32(del.ValidatorAddress)
		if err != nil {
			panic(err)
		}
		delAddr := sdk.MustAccAddressFromBech32(del.DelegatorAddress)

		if err := app.DistrKeeper.Hooks().BeforeDelegationCreated(ctx, delAddr, valAddr); err != nil {
			// never called as BeforeDelegationCreated always returns nil
			panic(fmt.Errorf("error while incrementing period: %w", err))
		}

		if err := app.DistrKeeper.Hooks().AfterDelegationModified(ctx, delAddr, valAddr); err != nil {
			// never called as AfterDelegationModified always returns nil
			panic(fmt.Errorf("error while creating a new delegation period record: %w", err))
		}
	}

	// reset context height
	ctx = ctx.WithBlockHeight(height)

	// Handle staking state.

	// iterate through redelegations, reset creation height
	err = app.StakingKeeper.IterateRedelegations(ctx, func(_ int64, red stakingtypes.Redelegation) (stop bool) {
		for i := range red.Entries {
			red.Entries[i].CreationHeight = 0
		}
		err = app.StakingKeeper.SetRedelegation(ctx, red)
		if err != nil {
			panic(err)
		}
		return false
	})
	if err != nil {
		panic(err)
	}

	// iterate through unbonding delegations, reset creation height
	err = app.StakingKeeper.IterateUnbondingDelegations(ctx, func(_ int64, ubd stakingtypes.UnbondingDelegation) (stop bool) {
		for i := range ubd.Entries {
			ubd.Entries[i].CreationHeight = 0
		}
		err = app.StakingKeeper.SetUnbondingDelegation(ctx, ubd)
		if err != nil {
			panic(err)
		}
		return false
	})
	if err != nil {
		panic(err)
	}

	// Iterate through validators by power descending, reset bond heights, and
	// update bond intra-tx counters.
	store := ctx.KVStore(app.GetKey(stakingtypes.StoreKey))
	iter := storetypes.KVStoreReversePrefixIterator(store, stakingtypes.ValidatorsKey)

	for ; iter.Valid(); iter.Next() {
		addr := sdk.ValAddress(stakingtypes.AddressFromValidatorsKey(iter.Key()))
		validator, err := app.StakingKeeper.GetValidator(ctx, addr)
		if err != nil {
			panic("expected validator, not found")
		}

		validator.UnbondingHeight = 0
		// Jail every validator not explicitly allowed to stay bonded.
		if applyAllowedAddrs && !allowedAddrsMap[addr.String()] {
			validator.Jailed = true
		}

		err = app.StakingKeeper.SetValidator(ctx, validator)
		if err != nil {
			panic(err)
		}
	}

	if err := iter.Close(); err != nil {
		app.Logger().Error("error while closing the key-value store reverse prefix iterator: ", err)
		return
	}

	_, err = app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx)
	if err != nil {
		log.Fatal(err)
	}

	// Handle slashing state.

	// reset start height on signing infos
	err = app.SlashingKeeper.IterateValidatorSigningInfos(
		ctx,
		func(addr sdk.ConsAddress, info slashingtypes.ValidatorSigningInfo) (stop bool) {
			info.StartHeight = 0
			if err := app.SlashingKeeper.SetValidatorSigningInfo(ctx, addr, info); err != nil {
				panic(err)
			}
			return false
		},
	)
	if err != nil {
		panic(err)
	}
}

14
app/genesis.go Normal file
View File

@ -0,0 +1,14 @@
package app
import (
"encoding/json"
)
// GenesisState of the blockchain is represented here as a map of raw JSON
// messages keyed by an identifier string.
// The identifier is used to determine which module the genesis information
// belongs to, so it may be appropriately routed during init chain.
// Within this application, default genesis information is retrieved from
// the ModuleBasicManager, which populates JSON from each BasicModule
// object provided to it during init.
type GenesisState map[string]json.RawMessage

19
app/params/doc.go Normal file
View File

@ -0,0 +1,19 @@
/*
Package params defines the simulation parameters for the application.
It contains the default weights used for each transaction used on the module's
simulation. These weights define the chance for a transaction to be simulated at
any given operation.
You can replace the default values for the weights by providing a params.json
file with the weights defined for each of the transaction operations:
{
"op_weight_msg_send": 60,
"op_weight_msg_delegate": 100,
}
In the example above, the `MsgSend` has 60% chance to be simulated, while the
`MsgDelegate` will always be simulated.
*/
package params

16
app/params/encoding.go Normal file
View File

@ -0,0 +1,16 @@
package params
import (
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/codec/types"
)
// EncodingConfig specifies the concrete encoding types to use for a given app.
// This is provided for compatibility between protobuf and amino implementations.
type EncodingConfig struct {
	// InterfaceRegistry resolves Any-packed interface values.
	InterfaceRegistry types.InterfaceRegistry
	// Codec is the primary (protobuf) codec.
	Codec codec.Codec
	// TxConfig handles transaction encoding, decoding, and signing modes.
	TxConfig client.TxConfig
	// Amino is the legacy amino codec, kept for backward compatibility.
	Amino *codec.LegacyAmino
}

42
app/params/proto.go Normal file
View File

@ -0,0 +1,42 @@
package params
import (
"github.com/cosmos/gogoproto/proto"
"cosmossdk.io/x/tx/signing"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/codec/address"
"github.com/cosmos/cosmos-sdk/codec/types"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/x/auth/tx"
)
// MakeEncodingConfig creates an EncodingConfig for an amino based test
// configuration.
func MakeEncodingConfig() EncodingConfig {
	legacyAmino := codec.NewLegacyAmino()

	// Build an interface registry whose signing options use the globally
	// configured bech32 account and validator prefixes.
	registryOpts := types.InterfaceRegistryOptions{
		ProtoFiles: proto.HybridResolver,
		SigningOptions: signing.Options{
			AddressCodec: address.Bech32Codec{
				Bech32Prefix: sdk.GetConfig().GetBech32AccountAddrPrefix(),
			},
			ValidatorAddressCodec: address.Bech32Codec{
				Bech32Prefix: sdk.GetConfig().GetBech32ValidatorAddrPrefix(),
			},
		},
	}
	registry, err := types.NewInterfaceRegistryWithOptions(registryOpts)
	if err != nil {
		panic(err)
	}

	protoCodec := codec.NewProtoCodec(registry)

	return EncodingConfig{
		InterfaceRegistry: registry,
		Codec:             protoCodec,
		TxConfig:          tx.NewTxConfig(protoCodec, tx.DefaultSignModes),
		Amino:             legacyAmino,
	}
}

364
app/sim_test.go Normal file
View File

@ -0,0 +1,364 @@
package app
import (
"encoding/json"
"flag"
"fmt"
"os"
"runtime/debug"
"strings"
"testing"
abci "github.com/cometbft/cometbft/abci/types"
cmtproto "github.com/cometbft/cometbft/proto/tendermint/types"
dbm "github.com/cosmos/cosmos-db"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"cosmossdk.io/log"
"cosmossdk.io/store"
storetypes "cosmossdk.io/store/types"
"cosmossdk.io/x/feegrant"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/server"
simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
simtypes "github.com/cosmos/cosmos-sdk/types/simulation"
authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper"
"github.com/cosmos/cosmos-sdk/x/simulation"
simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli"
slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types"
stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
)
// SimAppChainID hardcoded chainID for simulation
const SimAppChainID = "simulation-app"

// FlagEnableStreamingValue is set by the -EnableStreaming flag (see init) and
// enables the ABCI streaming service during determinism simulations.
var FlagEnableStreamingValue bool
// init registers the standard simulator CLI flags plus the custom
// -EnableStreaming flag every time the simulator is run.
func init() {
	simcli.GetSimulatorFlags()
	flag.BoolVar(&FlagEnableStreamingValue, "EnableStreaming", false, "Enable streaming service")
}
// fauxMerkleModeOpt returns a BaseApp option to use a dbStoreAdapter instead of
// an IAVLStore for faster simulation speed. It is passed as a BaseApp option
// to NewChainApp by the simulation tests below.
func fauxMerkleModeOpt(bapp *baseapp.BaseApp) {
	bapp.SetFauxMerkleMode()
}
// interBlockCacheOpt returns a BaseApp option function that sets the
// persistent inter-block write-through cache.
func interBlockCacheOpt() func(*baseapp.BaseApp) {
	cacheManager := store.NewCommitKVStoreCacheManager()
	return baseapp.SetInterBlockCache(cacheManager)
}
// TestFullAppSimulation runs a full randomized simulation against the app,
// failing on any simulation or export error.
func TestFullAppSimulation(t *testing.T) {
	config, db, _, chainApp := setupSimulationApp(t, "skipping application simulation")

	// Run the randomized simulation from the configured seed.
	_, simParams, simErr := simulation.SimulateFromSeed(
		t,
		os.Stdout,
		chainApp.BaseApp,
		simtestutil.AppStateFn(chainApp.AppCodec(), chainApp.SimulationManager(), chainApp.DefaultGenesis()),
		simtypes.RandomAccounts, // replace to use keys other than secp256k1
		simtestutil.SimulationOperations(chainApp, chainApp.AppCodec(), config),
		BlockedAddresses(),
		config,
		chainApp.AppCodec(),
	)

	// Export state and params before inspecting the simulation error.
	require.NoError(t, simtestutil.CheckExportSimulation(chainApp, config, simParams))
	require.NoError(t, simErr)

	if config.Commit {
		simtestutil.PrintStats(db)
	}
}
// TestAppImportExport runs a randomized simulation, exports the resulting
// genesis, imports it into a fresh app, and verifies both apps' KV stores
// match (ignoring transient queue/history prefixes).
func TestAppImportExport(t *testing.T) {
	config, db, appOptions, app := setupSimulationApp(t, "skipping application import/export simulation")

	// Run randomized simulation
	_, simParams, simErr := simulation.SimulateFromSeed(
		t,
		os.Stdout,
		app.BaseApp,
		simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()),
		simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1
		simtestutil.SimulationOperations(app, app.AppCodec(), config),
		BlockedAddresses(),
		config,
		app.AppCodec(),
	)

	// export state and simParams before the simulation error is checked
	err := simtestutil.CheckExportSimulation(app, config, simParams)
	require.NoError(t, err)
	require.NoError(t, simErr)

	if config.Commit {
		simtestutil.PrintStats(db)
	}

	t.Log("exporting genesis...\n")
	exported, err := app.ExportAppStateAndValidators(false, []string{}, []string{})
	require.NoError(t, err)

	t.Log("importing genesis...\n")
	newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue)
	require.NoError(t, err, "simulation setup failed")

	defer func() {
		require.NoError(t, newDB.Close())
		require.NoError(t, os.RemoveAll(newDir))
	}()

	// Fresh app over a fresh DB; feed it the exported genesis via InitChainer.
	newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, nil, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID))

	initReq := &abci.RequestInitChain{
		AppStateBytes: exported.AppState,
	}

	ctxA := app.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()})
	ctxB := newApp.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()})
	_, err = newApp.InitChainer(ctxB, initReq)
	if err != nil {
		// An all-unbonded validator set cannot be re-imported; treat this
		// randomized outcome as benign and end the test.
		if strings.Contains(err.Error(), "validator set is empty after InitGenesis") {
			t.Log("Skipping simulation as all validators have been unbonded")
			t.Logf("err: %s stacktrace: %s\n", err, string(debug.Stack()))
			return
		}
	}
	require.NoError(t, err)
	err = newApp.StoreConsensusParams(ctxB, exported.ConsensusParams)
	require.NoError(t, err)

	t.Log("comparing stores...")

	// skip certain prefixes: queue/history bookkeeping that legitimately
	// differs across an export/import round-trip
	skipPrefixes := map[string][][]byte{
		stakingtypes.StoreKey: {
			stakingtypes.UnbondingQueueKey, stakingtypes.RedelegationQueueKey, stakingtypes.ValidatorQueueKey,
			stakingtypes.HistoricalInfoKey, stakingtypes.UnbondingIDKey, stakingtypes.UnbondingIndexKey,
			stakingtypes.UnbondingTypeKey, stakingtypes.ValidatorUpdatesKey,
		},
		authzkeeper.StoreKey:   {authzkeeper.GrantQueuePrefix},
		feegrant.StoreKey:      {feegrant.FeeAllowanceQueueKeyPrefix},
		slashingtypes.StoreKey: {slashingtypes.ValidatorMissedBlockBitmapKeyPrefix},
	}

	storeKeys := app.GetStoreKeys()
	require.NotEmpty(t, storeKeys)

	for _, appKeyA := range storeKeys {
		// only compare kvstores
		if _, ok := appKeyA.(*storetypes.KVStoreKey); !ok {
			continue
		}

		keyName := appKeyA.Name()
		appKeyB := newApp.GetKey(keyName)

		storeA := ctxA.KVStore(appKeyA)
		storeB := ctxB.KVStore(appKeyB)

		failedKVAs, failedKVBs := simtestutil.DiffKVStores(storeA, storeB, skipPrefixes[keyName])
		if !assert.Equal(t, len(failedKVAs), len(failedKVBs), "unequal sets of key-values to compare in %q", keyName) {
			for _, v := range failedKVBs {
				t.Logf("store missmatch: %q\n", v)
			}
			t.FailNow()
		}

		t.Logf("compared %d different key/value pairs between %s and %s\n", len(failedKVAs), appKeyA, appKeyB)
		if !assert.Equal(t, 0, len(failedKVAs), simtestutil.GetSimulationLog(keyName, app.SimulationManager().StoreDecoders, failedKVAs, failedKVBs)) {
			for _, v := range failedKVAs {
				t.Logf("store missmatch: %q\n", v)
			}
			t.FailNow()
		}
	}
}
// TestAppSimulationAfterImport runs a randomized simulation, exports state at
// zero height, imports it into a fresh app via InitChain, and then runs a
// second simulation on the imported state.
func TestAppSimulationAfterImport(t *testing.T) {
	config, db, appOptions, app := setupSimulationApp(t, "skipping application simulation after import")

	// Run randomized simulation
	stopEarly, simParams, simErr := simulation.SimulateFromSeed(
		t,
		os.Stdout,
		app.BaseApp,
		simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()),
		simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1
		simtestutil.SimulationOperations(app, app.AppCodec(), config),
		BlockedAddresses(),
		config,
		app.AppCodec(),
	)

	// export state and simParams before the simulation error is checked
	err := simtestutil.CheckExportSimulation(app, config, simParams)
	require.NoError(t, err)
	require.NoError(t, simErr)

	if config.Commit {
		simtestutil.PrintStats(db)
	}

	if stopEarly {
		fmt.Println("can't export or import a zero-validator genesis, exiting test...")
		return
	}

	fmt.Printf("exporting genesis...\n")

	// forZeroHeight=true so the exported state can be replayed from height 0.
	exported, err := app.ExportAppStateAndValidators(true, []string{}, []string{})
	require.NoError(t, err)

	fmt.Printf("importing genesis...\n")

	newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue)
	require.NoError(t, err, "simulation setup failed")

	defer func() {
		require.NoError(t, newDB.Close())
		require.NoError(t, os.RemoveAll(newDir))
	}()

	newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, nil, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID))

	_, err = newApp.InitChain(&abci.RequestInitChain{
		ChainId:       SimAppChainID,
		AppStateBytes: exported.AppState,
	})
	require.NoError(t, err)

	// Second simulation pass over the freshly imported state.
	_, _, err = simulation.SimulateFromSeed(
		t,
		os.Stdout,
		newApp.BaseApp,
		simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()),
		simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1
		simtestutil.SimulationOperations(newApp, newApp.AppCodec(), config),
		BlockedAddresses(),
		config,
		app.AppCodec(),
	)
	require.NoError(t, err)
}
// setupSimulationApp builds the simulation config, database, app options, and
// app instance shared by the simulation tests, skipping the test (with msg)
// when simulation is disabled via CLI flags.
func setupSimulationApp(t *testing.T, msg string) (simtypes.Config, dbm.DB, simtestutil.AppOptionsMap, *SonrApp) {
	config := simcli.NewConfigFromFlags()
	config.ChainID = SimAppChainID

	db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue)
	if skip {
		t.Skip(msg)
	}
	require.NoError(t, err, "simulation setup failed")

	t.Cleanup(func() {
		require.NoError(t, db.Close())
		require.NoError(t, os.RemoveAll(dir))
	})

	appOptions := simtestutil.AppOptionsMap{
		flags.FlagHome:            dir, // ensure a unique folder
		server.FlagInvCheckPeriod: simcli.FlagPeriodValue,
	}

	chainApp := NewChainApp(logger, db, nil, true, appOptions, nil, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID))
	return config, db, appOptions, chainApp
}
// TODO: Make another test for the fuzzer itself, which just has noOp txs
// and doesn't depend on the application.

// TestAppStateDeterminism runs the same seeded simulation several times per
// seed and asserts the resulting app hash is identical across runs.
func TestAppStateDeterminism(t *testing.T) {
	if !simcli.FlagEnabledValue {
		t.Skip("skipping application simulation")
	}

	config := simcli.NewConfigFromFlags()
	config.InitialBlockHeight = 1
	config.ExportParamsPath = ""
	config.OnOperation = false
	config.AllInvariants = false
	config.ChainID = SimAppChainID

	numSeeds := 3
	numTimesToRunPerSeed := 3 // This used to be set to 5, but we've temporarily reduced it to 3 for the sake of faster CI.
	appHashList := make([]json.RawMessage, numTimesToRunPerSeed)

	// We will be overriding the random seed and just run a single simulation on the provided seed value
	if config.Seed != simcli.DefaultSeedValue {
		numSeeds = 1
	}

	appOptions := viper.New()
	if FlagEnableStreamingValue {
		// Enable ABCI streaming for all keys via the abci_v1 plugin.
		m := make(map[string]interface{})
		m["streaming.abci.keys"] = []string{"*"}
		m["streaming.abci.plugin"] = "abci_v1"
		m["streaming.abci.stop-node-on-err"] = true
		for key, value := range m {
			appOptions.SetDefault(key, value)
		}
	}
	appOptions.SetDefault(flags.FlagHome, t.TempDir()) // ensure a unique folder
	appOptions.SetDefault(server.FlagInvCheckPeriod, simcli.FlagPeriodValue)

	for i := 0; i < numSeeds; i++ {
		// NOTE(review): this accumulates (+0, +1, +2), producing seeds
		// seed, seed+1, seed+3 rather than seed+i. The seeds are still
		// distinct so determinism is still exercised, but confirm whether
		// `config.Seed += int64(i)` was meant to be `baseSeed + int64(i)`.
		config.Seed += int64(i)

		for j := 0; j < numTimesToRunPerSeed; j++ {
			var logger log.Logger
			if simcli.FlagVerboseValue {
				logger = log.NewTestLogger(t)
			} else {
				logger = log.NewNopLogger()
			}

			db := dbm.NewMemDB()
			app := NewChainApp(logger, db, nil, true, appOptions, nil, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID))

			fmt.Printf(
				"running non-determinism simulation; seed %d: %d/%d, attempt: %d/%d\n",
				config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed,
			)

			_, _, err := simulation.SimulateFromSeed(
				t,
				os.Stdout,
				app.BaseApp,
				simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()),
				simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1
				simtestutil.SimulationOperations(app, app.AppCodec(), config),
				BlockedAddresses(),
				config,
				app.AppCodec(),
			)
			require.NoError(t, err)

			if config.Commit {
				simtestutil.PrintStats(db)
			}

			// Every run for a given seed must produce the same app hash.
			appHash := app.LastCommitID().Hash
			appHashList[j] = appHash

			if j != 0 {
				require.Equal(
					t, string(appHashList[0]), string(appHashList[j]),
					"non-determinism in seed %d: %d/%d, attempt: %d/%d\n", config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed,
				)
			}
		}
	}
}

432
app/test_helpers.go Normal file
View File

@ -0,0 +1,432 @@
package app
import (
"encoding/json"
"fmt"
"math/rand"
"os"
"path/filepath"
"testing"
"time"
abci "github.com/cometbft/cometbft/abci/types"
cmtjson "github.com/cometbft/cometbft/libs/json"
cmttypes "github.com/cometbft/cometbft/types"
dbm "github.com/cosmos/cosmos-db"
"github.com/stretchr/testify/require"
"cosmossdk.io/log"
sdkmath "cosmossdk.io/math"
pruningtypes "cosmossdk.io/store/pruning/types"
"cosmossdk.io/store/snapshots"
snapshottypes "cosmossdk.io/store/snapshots/types"
bam "github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/codec"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/server"
servertypes "github.com/cosmos/cosmos-sdk/server/types"
"github.com/cosmos/cosmos-sdk/testutil/mock"
"github.com/cosmos/cosmos-sdk/testutil/network"
simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/module/testutil"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
minttypes "github.com/cosmos/cosmos-sdk/x/mint/types"
slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types"
stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
)
// SetupOptions defines arguments that are passed into `ChainApp` constructor.
type SetupOptions struct {
	Logger  log.Logger             // application logger
	DB      *dbm.MemDB             // in-memory backing database
	AppOpts servertypes.AppOptions // server/app options (e.g. home directory)
}
// setup constructs a SonrApp backed by an in-memory DB with a snapshot store
// enabled, returning the app together with its default genesis state (or an
// empty one when withGenesis is false).
func setup(
	t testing.TB,
	chainID string,
	withGenesis bool,
	invCheckPeriod uint,
) (*SonrApp, GenesisState) {
	nodeHome := t.TempDir()
	memDB := dbm.NewMemDB()

	// Snapshot store lives under the node home, mirroring the production layout.
	snapshotDir := filepath.Join(nodeHome, "data", "snapshots")
	snapshotDB, err := dbm.NewDB("metadata", dbm.GoLevelDBBackend, snapshotDir)
	require.NoError(t, err)
	t.Cleanup(func() { snapshotDB.Close() })
	snapshotStore, err := snapshots.NewStore(snapshotDB, snapshotDir)
	require.NoError(t, err)

	appOptions := simtestutil.AppOptionsMap{
		flags.FlagHome:            nodeHome, // ensure unique folder
		server.FlagInvCheckPeriod: invCheckPeriod,
	}

	chainApp := NewChainApp(
		log.NewNopLogger(),
		memDB,
		nil,
		true,
		appOptions,
		bam.SetChainID(chainID),
		bam.SetSnapshot(snapshotStore, snapshottypes.SnapshotOptions{KeepRecent: 2}),
	)

	if !withGenesis {
		return chainApp, GenesisState{}
	}
	return chainApp, chainApp.DefaultGenesis()
}
// NewChainAppWithCustomOptions initializes a new app with custom options, a
// single mock validator, and one funded genesis account. When isCheckTx is
// false it also runs InitChain so deliver state is populated.
func NewChainAppWithCustomOptions(t *testing.T, isCheckTx bool, options SetupOptions) *SonrApp {
	t.Helper()

	privVal := mock.NewPV()
	pubKey, err := privVal.GetPubKey()
	require.NoError(t, err)
	// create validator set with single validator
	validator := cmttypes.NewValidator(pubKey, 1)
	valSet := cmttypes.NewValidatorSet([]*cmttypes.Validator{validator})

	// generate genesis account funded with the default bond denom
	senderPrivKey := secp256k1.GenPrivKey()
	acc := authtypes.NewBaseAccount(senderPrivKey.PubKey().Address().Bytes(), senderPrivKey.PubKey(), 0, 0)
	balance := banktypes.Balance{
		Address: acc.GetAddress().String(),
		Coins:   sdk.NewCoins(sdk.NewCoin(sdk.DefaultBondDenom, sdkmath.NewInt(100000000000000))),
	}

	app := NewChainApp(
		options.Logger,
		options.DB,
		nil, true,
		options.AppOpts,
	)
	genesisState := app.DefaultGenesis()
	genesisState, err = GenesisStateWithValSet(app.AppCodec(), genesisState, valSet, []authtypes.GenesisAccount{acc}, balance)
	require.NoError(t, err)

	if !isCheckTx {
		// init chain must be called to stop deliverState from being nil
		stateBytes, err := cmtjson.MarshalIndent(genesisState, "", " ")
		require.NoError(t, err)

		// Initialize the chain
		_, err = app.InitChain(
			&abci.RequestInitChain{
				Validators:      []abci.ValidatorUpdate{},
				ConsensusParams: simtestutil.DefaultConsensusParams,
				AppStateBytes:   stateBytes,
			})
		require.NoError(t, err)
	}

	return app
}
// Setup initializes a new ChainApp with a Nop logger, a single freshly
// generated validator, and one funded genesis account acting as delegator.
func Setup(
	t *testing.T,
) *SonrApp {
	t.Helper()

	// One-validator validator set from a fresh mock consensus key.
	valPrivKey := mock.NewPV()
	valPubKey, err := valPrivKey.GetPubKey()
	require.NoError(t, err)
	valSet := cmttypes.NewValidatorSet([]*cmttypes.Validator{
		cmttypes.NewValidator(valPubKey, 1),
	})

	// Single genesis account funded with the default bond denom.
	accPrivKey := secp256k1.GenPrivKey()
	genAcc := authtypes.NewBaseAccount(accPrivKey.PubKey().Address().Bytes(), accPrivKey.PubKey(), 0, 0)
	genBalance := banktypes.Balance{
		Address: genAcc.GetAddress().String(),
		Coins:   sdk.NewCoins(sdk.NewCoin(sdk.DefaultBondDenom, sdkmath.NewInt(100000000000000))),
	}

	return SetupWithGenesisValSet(
		t,
		valSet,
		[]authtypes.GenesisAccount{genAcc},
		"testing",
		genBalance,
	)
}
// SetupWithGenesisValSet initializes a new ChainApp with a validator set and genesis accounts
// that also act as delegators. For simplicity, each validator is bonded with a delegation
// of one consensus engine unit in the default token of the ChainApp from first genesis
// account. A Nop logger is set in ChainApp.
func SetupWithGenesisValSet(
	t *testing.T,
	valSet *cmttypes.ValidatorSet,
	genAccs []authtypes.GenesisAccount,
	chainID string,
	balances ...banktypes.Balance,
) *SonrApp {
	t.Helper()
	app, genesisState := setup(
		t, chainID, true, 5,
	)
	genesisState, err := GenesisStateWithValSet(app.AppCodec(), genesisState, valSet, genAccs, balances...)
	require.NoError(t, err)

	stateBytes, err := json.MarshalIndent(genesisState, "", " ")
	require.NoError(t, err)

	// init chain will set the validator set and initialize the genesis accounts
	// NOTE(review): DefaultConsensusParams looks like a shared package-level
	// value; mutating Block.MaxGas through it may leak into other tests — confirm.
	consensusParams := simtestutil.DefaultConsensusParams
	consensusParams.Block.MaxGas = 100 * simtestutil.DefaultGenTxGas
	_, err = app.InitChain(&abci.RequestInitChain{
		ChainId:         chainID,
		Time:            time.Now().UTC(),
		Validators:      []abci.ValidatorUpdate{},
		ConsensusParams: consensusParams,
		InitialHeight:   app.LastBlockHeight() + 1,
		AppStateBytes:   stateBytes,
	})
	require.NoError(t, err)

	// Finalize one block so callers start from committed state at height 1.
	_, err = app.FinalizeBlock(&abci.RequestFinalizeBlock{
		Height:             app.LastBlockHeight() + 1,
		Hash:               app.LastCommitID().Hash,
		NextValidatorsHash: valSet.Hash(),
	})
	require.NoError(t, err)

	return app
}
// SetupWithEmptyStore creates a SonrApp instance backed by an empty in-memory
// store (no genesis state is applied).
func SetupWithEmptyStore(t testing.TB) *SonrApp {
	chainApp, _ := setup(t, "testing", false, 0)
	return chainApp
}
// GenesisStateWithSingleValidator initializes GenesisState with a single validator and genesis accounts
// that also act as delegators. The returned state is the app's default genesis
// augmented with one bonded validator and one funded account.
func GenesisStateWithSingleValidator(t *testing.T, app *SonrApp) GenesisState {
	t.Helper()

	privVal := mock.NewPV()
	pubKey, err := privVal.GetPubKey()
	require.NoError(t, err)

	// create validator set with single validator
	validator := cmttypes.NewValidator(pubKey, 1)
	valSet := cmttypes.NewValidatorSet([]*cmttypes.Validator{validator})

	// generate genesis account funded with the default bond denom
	senderPrivKey := secp256k1.GenPrivKey()
	acc := authtypes.NewBaseAccount(senderPrivKey.PubKey().Address().Bytes(), senderPrivKey.PubKey(), 0, 0)
	balances := []banktypes.Balance{
		{
			Address: acc.GetAddress().String(),
			Coins:   sdk.NewCoins(sdk.NewCoin(sdk.DefaultBondDenom, sdkmath.NewInt(100000000000000))),
		},
	}

	genesisState := app.DefaultGenesis()
	genesisState, err = GenesisStateWithValSet(app.AppCodec(), genesisState, valSet, []authtypes.GenesisAccount{acc}, balances...)
	require.NoError(t, err)

	return genesisState
}
// AddTestAddrsIncremental constructs accNum test accounts with incremental
// (deterministic) addresses, funds each with accAmt of the bond denom, and
// returns their addresses.
func AddTestAddrsIncremental(app *SonrApp, ctx sdk.Context, accNum int, accAmt sdkmath.Int) []sdk.AccAddress {
	addrs := addTestAddrs(app, ctx, accNum, accAmt, simtestutil.CreateIncrementalAccounts)
	return addrs
}
// addTestAddrs generates accNum addresses via the supplied strategy and funds
// each with accAmt of the staking bond denom.
func addTestAddrs(app *SonrApp, ctx sdk.Context, accNum int, accAmt sdkmath.Int, strategy simtestutil.GenerateAccountStrategy) []sdk.AccAddress {
	addrs := strategy(accNum)

	bondDenom, err := app.StakingKeeper.BondDenom(ctx)
	if err != nil {
		panic(err)
	}
	coins := sdk.NewCoins(sdk.NewCoin(bondDenom, accAmt))

	for i := range addrs {
		initAccountWithCoins(app, ctx, addrs[i], coins)
	}
	return addrs
}
// initAccountWithCoins mints coins into the mint module account and sends
// them to addr. Panics on any keeper error (test-only helper).
func initAccountWithCoins(app *SonrApp, ctx sdk.Context, addr sdk.AccAddress, coins sdk.Coins) {
	if err := app.BankKeeper.MintCoins(ctx, minttypes.ModuleName, coins); err != nil {
		panic(err)
	}
	if err := app.BankKeeper.SendCoinsFromModuleToAccount(ctx, minttypes.ModuleName, addr, coins); err != nil {
		panic(err)
	}
}
// NewTestNetworkFixture returns a new ChainApp AppConstructor for network
// simulation tests. A throwaway app instance is built only to extract the
// default genesis and encoding config for the fixture.
func NewTestNetworkFixture() network.TestFixture {
	dir, err := os.MkdirTemp("", "simapp")
	if err != nil {
		panic(fmt.Sprintf("failed creating temporary directory: %v", err))
	}
	defer os.RemoveAll(dir)

	// BUGFIX: the original passed a trailing nil as a variadic baseapp option;
	// baseapp applies each option by invoking it, so a nil option panics.
	app := NewChainApp(log.NewNopLogger(), dbm.NewMemDB(), nil, true, simtestutil.NewAppOptionsWithFlagHome(dir))

	// Per-validator constructor used by the in-process test network.
	appCtr := func(val network.ValidatorI) servertypes.Application {
		return NewChainApp(
			val.GetCtx().Logger, dbm.NewMemDB(), nil, true,
			simtestutil.NewAppOptionsWithFlagHome(val.GetCtx().Config.RootDir),
			bam.SetPruning(pruningtypes.NewPruningOptionsFromString(val.GetAppConfig().Pruning)),
			bam.SetMinGasPrices(val.GetAppConfig().MinGasPrices),
			bam.SetChainID(val.GetCtx().Viper.GetString(flags.FlagChainID)),
		)
	}

	return network.TestFixture{
		AppConstructor: appCtr,
		GenesisState:   app.DefaultGenesis(),
		EncodingConfig: testutil.TestEncodingConfig{
			InterfaceRegistry: app.InterfaceRegistry(),
			Codec:             app.AppCodec(),
			TxConfig:          app.TxConfig(),
			Amino:             app.LegacyAmino(),
		},
	}
}
// SignAndDeliverWithoutCommit signs and delivers a transaction. No commit
// is performed: the tx is executed in a single FinalizeBlock at the next
// height with the provided block time, and the raw response is returned.
func SignAndDeliverWithoutCommit(t *testing.T, txCfg client.TxConfig, app *bam.BaseApp, msgs []sdk.Msg, fees sdk.Coins, chainID string, accNums, accSeqs []uint64, blockTime time.Time, priv ...cryptotypes.PrivKey) (*abci.ResponseFinalizeBlock, error) {
	// Build and sign a mock tx with the default gen-tx gas limit; the RNG only
	// feeds signature nonce material, so time-seeding is fine for tests.
	tx, err := simtestutil.GenSignedMockTx(
		rand.New(rand.NewSource(time.Now().UnixNano())),
		txCfg,
		msgs,
		fees,
		simtestutil.DefaultGenTxGas,
		chainID,
		accNums,
		accSeqs,
		priv...,
	)
	require.NoError(t, err)

	bz, err := txCfg.TxEncoder()(tx)
	require.NoError(t, err)

	return app.FinalizeBlock(&abci.RequestFinalizeBlock{
		Height: app.LastBlockHeight() + 1,
		Time:   blockTime,
		Txs:    [][]byte{bz},
	})
}
// GenesisStateWithValSet returns a new genesis state with the validator set
// copied from simtestutil with delegation not added to supply.
// It writes the auth, staking, slashing, and bank genesis entries: every
// validator is marked Bonded with one power-reduction of tokens, delegated
// from the FIRST genesis account, and the bonded pool balance is added so
// bank supply stays consistent.
func GenesisStateWithValSet(
	codec codec.Codec,
	genesisState map[string]json.RawMessage,
	valSet *cmttypes.ValidatorSet,
	genAccs []authtypes.GenesisAccount,
	balances ...banktypes.Balance,
) (map[string]json.RawMessage, error) {
	// set genesis accounts
	authGenesis := authtypes.NewGenesisState(authtypes.DefaultParams(), genAccs)
	genesisState[authtypes.ModuleName] = codec.MustMarshalJSON(authGenesis)

	validators := make([]stakingtypes.Validator, 0, len(valSet.Validators))
	delegations := make([]stakingtypes.Delegation, 0, len(valSet.Validators))

	bondAmt := sdk.DefaultPowerReduction

	for _, val := range valSet.Validators {
		// Convert the CometBFT pubkey into the SDK's Any-wrapped form.
		pk, err := cryptocodec.FromCmtPubKeyInterface(val.PubKey)
		if err != nil {
			return nil, fmt.Errorf("failed to convert pubkey: %w", err)
		}
		pkAny, err := codectypes.NewAnyWithValue(pk)
		if err != nil {
			return nil, fmt.Errorf("failed to create new any: %w", err)
		}
		validator := stakingtypes.Validator{
			OperatorAddress:   sdk.ValAddress(val.Address).String(),
			ConsensusPubkey:   pkAny,
			Jailed:            false,
			Status:            stakingtypes.Bonded,
			Tokens:            bondAmt,
			DelegatorShares:   sdkmath.LegacyOneDec(),
			Description:       stakingtypes.Description{},
			UnbondingHeight:   int64(0),
			UnbondingTime:     time.Unix(0, 0).UTC(),
			Commission:        stakingtypes.NewCommission(sdkmath.LegacyZeroDec(), sdkmath.LegacyZeroDec(), sdkmath.LegacyZeroDec()),
			MinSelfDelegation: sdkmath.ZeroInt(),
		}
		validators = append(validators, validator)
		// All delegations come from the first genesis account.
		delegations = append(delegations, stakingtypes.NewDelegation(genAccs[0].GetAddress().String(), sdk.ValAddress(val.Address).String(), sdkmath.LegacyOneDec()))
	}
	// set validators and delegations
	stakingGenesis := stakingtypes.NewGenesisState(stakingtypes.DefaultParams(), validators, delegations)
	genesisState[stakingtypes.ModuleName] = codec.MustMarshalJSON(stakingGenesis)

	// Empty signing infos so slashing starts with a clean slate per validator.
	signingInfos := make([]slashingtypes.SigningInfo, len(valSet.Validators))
	for i, val := range valSet.Validators {
		signingInfos[i] = slashingtypes.SigningInfo{
			Address:              sdk.ConsAddress(val.Address).String(),
			ValidatorSigningInfo: slashingtypes.ValidatorSigningInfo{},
		}
	}
	slashingGenesis := slashingtypes.NewGenesisState(slashingtypes.DefaultParams(), signingInfos, nil)
	genesisState[slashingtypes.ModuleName] = codec.MustMarshalJSON(slashingGenesis)

	// add bonded amount to bonded pool module account
	balances = append(balances, banktypes.Balance{
		Address: authtypes.NewModuleAddress(stakingtypes.BondedPoolName).String(),
		Coins:   sdk.Coins{sdk.NewCoin(sdk.DefaultBondDenom, bondAmt.MulRaw(int64(len(valSet.Validators))))},
	})

	totalSupply := sdk.NewCoins()
	for _, b := range balances {
		// add genesis acc tokens to total supply
		totalSupply = totalSupply.Add(b.Coins...)
	}

	// update total supply
	bankGenesis := banktypes.NewGenesisState(banktypes.DefaultGenesisState().Params, balances, totalSupply, []banktypes.Metadata{}, []banktypes.SendEnabled{})
	genesisState[banktypes.ModuleName] = codec.MustMarshalJSON(bankGenesis)

	return genesisState, nil
}
// account bundles the keys of a generated test account: a secp256k1 account
// key, its derived bech32 address, and an ed25519 validator (consensus) key.
type account struct {
	priv   *secp256k1.PrivKey
	addr   sdk.AccAddress
	valKey *ed25519.PrivKey
}
// GenAccount returns a test account with freshly generated secp256k1 account
// and ed25519 validator keys.
func GenAccount() account {
	accKey := secp256k1.GenPrivKey()
	acct := account{
		priv:   accKey,
		addr:   sdk.AccAddress(accKey.PubKey().Address()),
		valKey: ed25519.GenPrivKey(),
	}
	return acct
}

35
app/test_support.go Normal file
View File

@ -0,0 +1,35 @@
package app
import (
capabilitykeeper "github.com/cosmos/ibc-go/modules/capability/keeper"
ibckeeper "github.com/cosmos/ibc-go/v8/modules/core/keeper"
"github.com/cosmos/cosmos-sdk/baseapp"
authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper"
bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper"
stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper"
)
// GetIBCKeeper returns the app's IBC keeper (test-support accessor).
func (app *SonrApp) GetIBCKeeper() *ibckeeper.Keeper {
	return app.IBCKeeper
}
// GetScopedIBCKeeper returns the capability-scoped IBC keeper (test-support accessor).
func (app *SonrApp) GetScopedIBCKeeper() capabilitykeeper.ScopedKeeper {
	return app.ScopedIBCKeeper
}
// GetBaseApp returns the embedded BaseApp (test-support accessor).
func (app *SonrApp) GetBaseApp() *baseapp.BaseApp {
	return app.BaseApp
}
// GetBankKeeper returns the bank keeper (test-support accessor).
func (app *SonrApp) GetBankKeeper() bankkeeper.Keeper {
	return app.BankKeeper
}
// GetStakingKeeper returns the staking keeper (test-support accessor).
func (app *SonrApp) GetStakingKeeper() *stakingkeeper.Keeper {
	return app.StakingKeeper
}
// GetAccountKeeper returns the auth account keeper (test-support accessor).
func (app *SonrApp) GetAccountKeeper() authkeeper.AccountKeeper {
	return app.AccountKeeper
}

61
app/upgrades.go Normal file
View File

@ -0,0 +1,61 @@
package app
import (
"fmt"
upgradetypes "cosmossdk.io/x/upgrade/types"
"github.com/onsonr/hway/app/upgrades"
"github.com/onsonr/hway/app/upgrades/noop"
)
// Upgrades list of chain upgrades.
// NOTE(review): mutated at runtime — RegisterUpgradeHandlers appends a no-op
// upgrade when this slice is empty.
var Upgrades = []upgrades.Upgrade{}
// RegisterUpgradeHandlers registers the chain upgrade handlers.
// If no upgrades are configured it registers a no-op upgrade named after the
// current version (so system tests always have one). It then installs each
// upgrade's handler and, if an on-disk upgrade is pending at a non-skipped
// height, the matching store loader.
func (app *SonrApp) RegisterUpgradeHandlers() {
	// setupLegacyKeyTables(&app.ParamsKeeper)
	if len(Upgrades) == 0 {
		// always have a unique upgrade registered for the current version to test in system tests
		Upgrades = append(Upgrades, noop.NewUpgrade(app.Version()))
	}

	// Keepers shared with every upgrade handler.
	keepers := upgrades.AppKeepers{
		AccountKeeper:         &app.AccountKeeper,
		ParamsKeeper:          &app.ParamsKeeper,
		ConsensusParamsKeeper: &app.ConsensusParamsKeeper,
		CapabilityKeeper:      app.CapabilityKeeper,
		IBCKeeper:             app.IBCKeeper,
		Codec:                 app.appCodec,
		GetStoreKey:           app.GetKey,
	}
	app.GetStoreKeys()
	// register all upgrade handlers
	for _, upgrade := range Upgrades {
		app.UpgradeKeeper.SetUpgradeHandler(
			upgrade.UpgradeName,
			upgrade.CreateUpgradeHandler(
				app.ModuleManager,
				app.configurator,
				&keepers,
			),
		)
	}

	upgradeInfo, err := app.UpgradeKeeper.ReadUpgradeInfoFromDisk()
	if err != nil {
		panic(fmt.Sprintf("failed to read upgrade info from disk %s", err))
	}

	if app.UpgradeKeeper.IsSkipHeight(upgradeInfo.Height) {
		return
	}

	// register store loader for current upgrade
	for _, upgrade := range Upgrades {
		if upgradeInfo.Name == upgrade.UpgradeName {
			app.SetStoreLoader(upgradetypes.UpgradeStoreLoader(upgradeInfo.Height, &upgrade.StoreUpgrades)) // nolint:gosec
			break
		}
	}
}

View File

@ -0,0 +1,34 @@
package noop
import (
"context"
storetypes "cosmossdk.io/store/types"
upgradetypes "cosmossdk.io/x/upgrade/types"
"github.com/cosmos/cosmos-sdk/types/module"
"github.com/onsonr/hway/app/upgrades"
)
// NewUpgrade builds a no-op Upgrade for the given semantic version: no stores
// are added or removed, and the handler only runs module migrations.
func NewUpgrade(semver string) upgrades.Upgrade {
	up := upgrades.Upgrade{
		UpgradeName:          semver,
		CreateUpgradeHandler: CreateUpgradeHandler,
	}
	up.StoreUpgrades = storetypes.StoreUpgrades{
		Added:   []string{},
		Deleted: []string{},
	}
	return up
}
// CreateUpgradeHandler returns a handler that only runs module migrations;
// the keepers argument is accepted for interface compatibility but unused.
func CreateUpgradeHandler(
	mm upgrades.ModuleManager,
	configurator module.Configurator,
	ak *upgrades.AppKeepers,
) upgradetypes.UpgradeHandler {
	return func(ctx context.Context, plan upgradetypes.Plan, fromVM module.VersionMap) (module.VersionMap, error) {
		return mm.RunMigrations(ctx, configurator, fromVM)
	}
}

44
app/upgrades/types.go Normal file
View File

@ -0,0 +1,44 @@
package upgrades
import (
"context"
capabilitykeeper "github.com/cosmos/ibc-go/modules/capability/keeper"
ibckeeper "github.com/cosmos/ibc-go/v8/modules/core/keeper"
storetypes "cosmossdk.io/store/types"
upgradetypes "cosmossdk.io/x/upgrade/types"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/types/module"
authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper"
consensusparamkeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper"
paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper"
)
// AppKeepers collects the keepers and helpers an upgrade handler may need,
// wired up by the app when registering upgrade handlers.
type AppKeepers struct {
	AccountKeeper         *authkeeper.AccountKeeper
	ParamsKeeper          *paramskeeper.Keeper
	ConsensusParamsKeeper *consensusparamkeeper.Keeper
	Codec                 codec.Codec
	GetStoreKey           func(storeKey string) *storetypes.KVStoreKey // resolves a module's KV store key by name
	CapabilityKeeper      *capabilitykeeper.Keeper
	IBCKeeper             *ibckeeper.Keeper
}
// ModuleManager is the subset of the SDK module manager that upgrade
// handlers use to run state migrations.
type ModuleManager interface {
	RunMigrations(ctx context.Context, cfg module.Configurator, fromVM module.VersionMap) (module.VersionMap, error)
	GetVersionMap() module.VersionMap
}
// Upgrade defines a struct containing necessary fields that a SoftwareUpgradeProposal
// must have written, in order for the state migration to go smoothly.
// An upgrade must implement this struct, and then set it in the app.go.
// The app.go will then define the handler.
type Upgrade struct {
	// Upgrade version name, for the upgrade handler, e.g. `v7`
	UpgradeName string

	// CreateUpgradeHandler defines the function that creates an upgrade handler
	CreateUpgradeHandler func(ModuleManager, module.Configurator, *AppKeepers) upgradetypes.UpgradeHandler

	// StoreUpgrades lists module stores added/renamed/deleted at this upgrade.
	StoreUpgrades storetypes.StoreUpgrades
}

11
chains.yaml Normal file
View File

@ -0,0 +1,11 @@
# This file is used to create docker images using the heighliner binary.
# see: https://github.com/strangelove-ventures/heighliner
- name: sonr
dockerfile: cosmos
build-target: make install
binaries:
- /go/bin/sonrd
build-env:
- LEDGER_ENABLED=false
- BUILD_TAGS=muslc

10
chains/README.md Normal file
View File

@ -0,0 +1,10 @@
# Local Interchain Configurations
RUN:
- `make testnet` *(full setup: docker image, binary, keys, and ibc testnet start)*
- `spawn local-ic start testnet` *(Standalone start)*
## Documentation
* https://github.com/strangelove-ventures/interchaintest/tree/main/local-interchain

103
chains/ibc-testnet.json Normal file
View File

@ -0,0 +1,103 @@
{
"chains": [
{
"name": "core",
"ibc_paths": ["ibc-connection-1"],
"chain_id": "chainid-1",
"denom": "usnr",
"binary": "sonrd",
"bech32_prefix": "idx",
"docker_image": {
"repository": "core",
"version": "local"
},
"gas_prices": "0usnr",
"chain_type": "cosmos",
"coin_type": 118,
"trusting_period": "336h",
"gas_adjustment": 1.5,
"number_vals": 1,
"number_node": 0,
"debugging": true,
"block_time": "1000ms",
"host_port_override": {
"26657": "26657",
"1317": "1317",
"9090": "9090"
},
"encoding-options": ["wasm", "tokenfactory"],
"config_file_overrides": [
{
"file": "config/config.toml",
"paths": {
"moniker": "localvalmoniker",
"rpc.cors_allowed_origins": ["*"]
}
}
],
"genesis": {
"modify": [
{
"key": "app_state.gov.params.voting_period",
"value": "15s"
},
{
"key": "app_state.gov.params.expedited_voting_period",
"value": "10s"
},
{
"key": "app_state.gov.params.max_deposit_period",
"value": "15s"
},
{
"key": "app_state.gov.params.min_deposit.0.denom",
"value": "usnr"
},
{
"key": "app_state.gov.params.min_deposit.0.amount",
"value": "1"
}
],
"accounts": [
{
"name": "acc0",
"address": "idx1hj5fveer5cjtn4wd6wstzugjfdxzl0xpecp0nd",
"amount": "10000000000usnr",
"mnemonic": "decorate bright ozone fork gallery riot bus exhaust worth way bone indoor calm squirrel merry zero scheme cotton until shop any excess stage laundry"
},
{
"name": "acc1",
"address": "idx1efd63aw40lxf3n4mhf7dzhjkr453axur9vjt6y",
"amount": "10000000000usnr",
"mnemonic": "wealth flavor believe regret funny network recall kiss grape useless pepper cram hint member few certain unveil rather brick bargain curious require crowd raise"
}
],
"startup_commands": []
}
},
{
"name": "gaia",
"chain_id": "localcosmos-1",
"denom": "uatom",
"binary": "gaiad",
"bech32_prefix": "cosmos",
"docker_image": {
"version": "v9.1.0"
},
"block_time": "1000ms",
"gas_prices": "0%DENOM%",
"gas_adjustment": 2.0,
"ibc_paths": ["ibc-connection-1"],
"genesis": {
"accounts": [
{
"name": "acc0",
"address": "cosmos1hj5fveer5cjtn4wd6wstzugjfdxzl0xpxvjjvr",
"amount": "10000000%DENOM%",
"mnemonic": "decorate bright ozone fork gallery riot bus exhaust worth way bone indoor calm squirrel merry zero scheme cotton until shop any excess stage laundry"
}
]
}
}
]
}

78
chains/testnet.json Normal file
View File

@ -0,0 +1,78 @@
{
"chains": [
{
"name": "core",
"chain_id": "chainid-1",
"denom": "usnr",
"binary": "sonrd",
"bech32_prefix": "idx",
"docker_image": {
"repository": "core",
"version": "local"
},
"gas_prices": "0usnr",
"chain_type": "cosmos",
"coin_type": 118,
"trusting_period": "336h",
"gas_adjustment": 1.5,
"number_vals": 1,
"number_node": 0,
"debugging": true,
"block_time": "1000ms",
"host_port_override": {
"26657": "26657",
"1317": "1317",
"9090": "9090"
},
"encoding-options": ["wasm", "tokenfactory"],
"config_file_overrides": [
{
"file": "config/config.toml",
"paths": {
"moniker": "localvalmoniker",
"rpc.cors_allowed_origins": ["*"]
}
}
],
"genesis": {
"modify": [
{
"key": "app_state.gov.params.voting_period",
"value": "15s"
},
{
"key": "app_state.gov.params.expedited_voting_period",
"value": "10s"
},
{
"key": "app_state.gov.params.max_deposit_period",
"value": "15s"
},
{
"key": "app_state.gov.params.min_deposit.0.denom",
"value": "usnr"
},
{
"key": "app_state.gov.params.min_deposit.0.amount",
"value": "1"
}
],
"accounts": [
{
"name": "acc0",
"address": "idx1hj5fveer5cjtn4wd6wstzugjfdxzl0xpecp0nd",
"amount": "10000000000usnr",
"mnemonic": "decorate bright ozone fork gallery riot bus exhaust worth way bone indoor calm squirrel merry zero scheme cotton until shop any excess stage laundry"
},
{
"name": "acc1",
"address": "idx1efd63aw40lxf3n4mhf7dzhjkr453axur9vjt6y",
"amount": "10000000000usnr",
"mnemonic": "wealth flavor believe regret funny network recall kiss grape useless pepper cram hint member few certain unveil rather brick bargain curious require crowd raise"
}
],
"startup_commands": []
}
}
]
}

248
cmd/sonrd/commands.go Normal file
View File

@ -0,0 +1,248 @@
package main
import (
"errors"
"io"
"os"
cmtcfg "github.com/cometbft/cometbft/config"
dbm "github.com/cosmos/cosmos-db"
"github.com/onsonr/hway/app"
"github.com/spf13/cast"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"cosmossdk.io/log"
confixcmd "cosmossdk.io/tools/confix/cmd"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/debug"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/client/keys"
"github.com/cosmos/cosmos-sdk/client/pruning"
"github.com/cosmos/cosmos-sdk/client/rpc"
"github.com/cosmos/cosmos-sdk/client/snapshot"
codectypes "github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/server"
serverconfig "github.com/cosmos/cosmos-sdk/server/config"
servertypes "github.com/cosmos/cosmos-sdk/server/types"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/module"
authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
"github.com/cosmos/cosmos-sdk/x/crisis"
genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli"
)
// initCometBFTConfig helps to override default CometBFT config values.
// Currently returns cmtcfg.DefaultConfig unchanged; tweak peer limits here if
// needed (note they put a higher strain on node memory, e.g.
// P2P.MaxNumInboundPeers = 100, P2P.MaxNumOutboundPeers = 40).
func initCometBFTConfig() *cmtcfg.Config {
	return cmtcfg.DefaultConfig()
}
// initAppConfig helps to override the default appConfig template and config.
// Return "", nil if no custom configuration is required for the application.
//
// The SDK ships app.toml with an empty minimum-gas-prices, which makes nodes
// halt on startup unless validators set one. We default it to "0stake" so
// validators CAN still override it in their own app.toml.
func initAppConfig() (string, interface{}) {
	// Wrapper kept so chain-specific config sections can be added later.
	type CustomAppConfig struct {
		serverconfig.Config
	}

	srvCfg := serverconfig.DefaultConfig()
	srvCfg.MinGasPrices = "0stake"
	// srvCfg.BaseConfig.IAVLDisableFastNode = true // disable fastnode by default

	return serverconfig.DefaultConfigTemplate, CustomAppConfig{Config: *srvCfg}
}
// initRootCmd wires all subcommands (genesis, testnet, debug, config, pruning,
// snapshot, server, keys, query, tx) onto the root command and seals the SDK
// bech32 config so prefixes cannot change afterwards.
func initRootCmd(
	rootCmd *cobra.Command,
	txConfig client.TxConfig,
	_ codectypes.InterfaceRegistry,
	basicManager module.BasicManager,
) {
	cfg := sdk.GetConfig()
	cfg.Seal()

	rootCmd.AddCommand(
		genutilcli.InitCmd(basicManager, app.DefaultNodeHome),
		NewTestnetCmd(basicManager, banktypes.GenesisBalancesIterator{}),
		debug.Cmd(),
		confixcmd.ConfigCommand(),
		pruning.Cmd(newApp, app.DefaultNodeHome),
		snapshot.Cmd(newApp),
	)

	server.AddCommands(rootCmd, app.DefaultNodeHome, newApp, appExport, addModuleInitFlags)

	// add keybase, auxiliary RPC, query, genesis, and tx child commands
	rootCmd.AddCommand(
		server.StatusCommand(),
		genesisCommand(txConfig, basicManager),
		queryCommand(),
		txCommand(),
		keys.Commands(),
	)
}
// addModuleInitFlags attaches module-specific flags (crisis) to the start command.
func addModuleInitFlags(startCmd *cobra.Command) {
	crisis.AddModuleInitFlags(startCmd)
}
// genesisCommand builds the genesis-related `genesis` parent command. Callers
// may pass application-specific subcommands to attach.
func genesisCommand(txConfig client.TxConfig, basicManager module.BasicManager, cmds ...*cobra.Command) *cobra.Command {
	root := genutilcli.Commands(txConfig, basicManager, app.DefaultNodeHome)
	for i := range cmds {
		root.AddCommand(cmds[i])
	}
	return root
}
// queryCommand returns the parent `query` command with the standard server
// and auth query subcommands attached.
func queryCommand() *cobra.Command {
	queryCmd := &cobra.Command{
		Use:                        "query",
		Aliases:                    []string{"q"},
		Short:                      "Querying subcommands",
		DisableFlagParsing:         false,
		SuggestionsMinimumDistance: 2,
		RunE:                       client.ValidateCmd,
	}

	subCmds := []*cobra.Command{
		rpc.QueryEventForTxCmd(),
		server.QueryBlockCmd(),
		authcmd.QueryTxsByEventsCmd(),
		server.QueryBlocksCmd(),
		authcmd.QueryTxCmd(),
		server.QueryBlockResultsCmd(),
	}
	queryCmd.AddCommand(subCmds...)

	return queryCmd
}
// txCommand returns the parent `tx` command with the standard auth signing,
// broadcasting, and encoding subcommands attached.
func txCommand() *cobra.Command {
	txCmd := &cobra.Command{
		Use:                        "tx",
		Short:                      "Transactions subcommands",
		DisableFlagParsing:         false,
		SuggestionsMinimumDistance: 2,
		RunE:                       client.ValidateCmd,
	}

	subCmds := []*cobra.Command{
		authcmd.GetSignCommand(),
		authcmd.GetSignBatchCommand(),
		authcmd.GetMultiSignCommand(),
		authcmd.GetMultiSignBatchCmd(),
		authcmd.GetValidateSignaturesCommand(),
		authcmd.GetBroadcastCommand(),
		authcmd.GetEncodeCommand(),
		authcmd.GetDecodeCommand(),
		authcmd.GetSimulateCmd(),
	}
	txCmd.AddCommand(subCmds...)

	return txCmd
}
// newApp creates the application with baseapp options derived from appOpts;
// used as the server's AppCreator.
func newApp(
	logger log.Logger,
	db dbm.DB,
	traceStore io.Writer,
	appOpts servertypes.AppOptions,
) servertypes.Application {
	baseappOptions := server.DefaultBaseappOptions(appOpts)

	if cast.ToBool(appOpts.Get("telemetry.enabled")) {
		// NOTE(review): empty branch — the telemetry flag is read but nothing
		// is done with it; confirm whether telemetry wiring was intended here.
	}

	return app.NewChainApp(
		logger, db, traceStore, true,
		appOpts,
		baseappOptions...,
	)
}
// appExport creates a new app (optionally at a given height) and exports state
// and validators; used as the server's AppExporter. height == -1 means export
// the latest committed state.
func appExport(
	logger log.Logger,
	db dbm.DB,
	traceStore io.Writer,
	height int64,
	forZeroHeight bool,
	jailAllowedAddrs []string,
	appOpts servertypes.AppOptions,
	modulesToExport []string,
) (servertypes.ExportedApp, error) {
	var chainApp *app.SonrApp

	// this check is necessary as we use the flag in x/upgrade.
	// we can exit more gracefully by checking the flag here.
	homePath, ok := appOpts.Get(flags.FlagHome).(string)
	if !ok || homePath == "" {
		return servertypes.ExportedApp{}, errors.New("application home is not set")
	}

	viperAppOpts, ok := appOpts.(*viper.Viper)
	if !ok {
		return servertypes.ExportedApp{}, errors.New("appOpts is not viper.Viper")
	}

	// overwrite the FlagInvCheckPeriod
	viperAppOpts.Set(server.FlagInvCheckPeriod, 1)
	appOpts = viperAppOpts

	// BUGFIX: the original passed a trailing nil as a variadic baseapp option;
	// baseapp invokes every option it receives, so a nil option panics.
	chainApp = app.NewChainApp(
		logger,
		db,
		traceStore,
		height == -1, // load the latest state only when no explicit height is requested
		appOpts,
	)

	if height != -1 {
		if err := chainApp.LoadHeight(height); err != nil {
			return servertypes.ExportedApp{}, err
		}
	}

	return chainApp.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs, modulesToExport)
}
// tempDir returns a throwaway temp directory path for pre-instantiating the
// app (encoding config only).
// NOTE(review): the deferred RemoveAll runs before the caller can use the
// returned path, so the directory no longer exists when returned. This mirrors
// upstream simapp, where only the path string is needed — confirm nothing
// writes into it.
var tempDir = func() string {
	dir, err := os.MkdirTemp("", "sonr")
	if err != nil {
		panic("failed to create temp dir: " + err.Error())
	}
	defer os.RemoveAll(dir)

	return dir
}

19
cmd/sonrd/main.go Normal file
View File

@ -0,0 +1,19 @@
package main
import (
"os"
"cosmossdk.io/log"
svrcmd "github.com/cosmos/cosmos-sdk/server/cmd"
"github.com/onsonr/hway/app"
)
// main runs the chain daemon root command and exits non-zero on failure.
func main() {
	rootCmd := NewRootCmd()
	err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome)
	if err != nil {
		log.NewLogger(rootCmd.OutOrStderr()).Error("failure when running app", "err", err)
		os.Exit(1)
	}
}

126
cmd/sonrd/root.go Normal file
View File

@ -0,0 +1,126 @@
package main
import (
"os"
dbm "github.com/cosmos/cosmos-db"
"github.com/spf13/cobra"
"cosmossdk.io/log"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/config"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
"github.com/cosmos/cosmos-sdk/server"
simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/tx/signing"
"github.com/cosmos/cosmos-sdk/version"
"github.com/cosmos/cosmos-sdk/x/auth/tx"
txmodule "github.com/cosmos/cosmos-sdk/x/auth/tx/config"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
"github.com/onsonr/hway/app"
"github.com/onsonr/hway/app/params"
"github.com/onsonr/hway/internal/env"
// NewRootCmd creates a new root command for chain app. It is called once in the
// main function.
)
// NewRootCmd creates the root command for the chain daemon. It seals the
// bech32 prefixes, pre-instantiates a throwaway app to obtain the encoding
// config, builds the base client context, and attaches all subcommands plus
// autocli enhancements.
func NewRootCmd() *cobra.Command {
	cfg := sdk.GetConfig()
	cfg.SetBech32PrefixForAccount(app.Bech32PrefixAccAddr, app.Bech32PrefixAccPub)
	cfg.SetBech32PrefixForValidator(app.Bech32PrefixValAddr, app.Bech32PrefixValPub)
	cfg.SetBech32PrefixForConsensusNode(app.Bech32PrefixConsAddr, app.Bech32PrefixConsPub)
	cfg.Seal()

	// we "pre"-instantiate the application for getting the injected/configured encoding configuration
	// note, this is not necessary when using app wiring, as depinject can be directly used (see root_v2.go)
	tempApp := app.NewChainApp(
		log.NewNopLogger(), dbm.NewMemDB(), nil, false, simtestutil.NewAppOptionsWithFlagHome(tempDir()),
	)
	encodingConfig := params.EncodingConfig{
		InterfaceRegistry: tempApp.InterfaceRegistry(),
		Codec:             tempApp.AppCodec(),
		TxConfig:          tempApp.TxConfig(),
		Amino:             tempApp.LegacyAmino(),
	}

	initClientCtx := client.Context{}.
		WithCodec(encodingConfig.Codec).
		WithInterfaceRegistry(encodingConfig.InterfaceRegistry).
		WithTxConfig(encodingConfig.TxConfig).
		WithLegacyAmino(encodingConfig.Amino).
		WithInput(os.Stdin).
		WithAccountRetriever(authtypes.AccountRetriever{}).
		WithHomeDir(app.DefaultNodeHome).
		WithViper("")

	rootCmd := &cobra.Command{
		Use:           version.AppName,
		Short:         version.AppName + " Daemon (server)",
		SilenceErrors: true,
		PersistentPreRunE: func(cmd *cobra.Command, _ []string) error {
			// set the default command outputs
			cmd.SetOut(cmd.OutOrStdout())
			cmd.SetErr(cmd.ErrOrStderr())

			initClientCtx = initClientCtx.WithCmdContext(cmd.Context())
			initClientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags())
			if err != nil {
				return err
			}

			initClientCtx, err = config.ReadFromClientConfig(initClientCtx)
			if err != nil {
				return err
			}

			// This needs to go after ReadFromClientConfig, as that function
			// sets the RPC client needed for SIGN_MODE_TEXTUAL. This sign mode
			// is only available if the client is online.
			if !initClientCtx.Offline {
				enabledSignModes := append(tx.DefaultSignModes, signing.SignMode_SIGN_MODE_TEXTUAL)
				txConfigOpts := tx.ConfigOptions{
					EnabledSignModes:           enabledSignModes,
					TextualCoinMetadataQueryFn: txmodule.NewGRPCCoinMetadataQueryFn(initClientCtx),
				}
				txConfig, err := tx.NewTxConfigWithOptions(
					initClientCtx.Codec,
					txConfigOpts,
				)
				if err != nil {
					return err
				}

				initClientCtx = initClientCtx.WithTxConfig(txConfig)
			}

			if err := client.SetCmdClientContextHandler(initClientCtx, cmd); err != nil {
				return err
			}

			// Set the context chain ID and validator address
			// NOTE(review): FromAddress may be unset when no --from flag is
			// given; confirm String() on the zero value is acceptable here.
			env.SetLocalChainID(initClientCtx.ChainID)
			env.SetLocalValidatorAddress(initClientCtx.FromAddress.String())

			customAppTemplate, customAppConfig := initAppConfig()
			customCMTConfig := initCometBFTConfig()

			return server.InterceptConfigsPreRunHandler(cmd, customAppTemplate, customAppConfig, customCMTConfig)
		},
	}

	initRootCmd(rootCmd, encodingConfig.TxConfig, encodingConfig.InterfaceRegistry, tempApp.BasicModuleManager)

	// add keyring to autocli opts
	autoCliOpts := tempApp.AutoCliOpts()
	initClientCtx, _ = config.ReadFromClientConfig(initClientCtx)
	autoCliOpts.Keyring, _ = keyring.NewAutoCLIKeyring(initClientCtx.Keyring)
	autoCliOpts.ClientCtx = initClientCtx

	if err := autoCliOpts.EnhanceRootCommand(rootCmd); err != nil {
		panic(err)
	}

	return rootCmd
}

581
cmd/sonrd/testnet.go Normal file
View File

@ -0,0 +1,581 @@
package main
// DONTCOVER
import (
"bufio"
"encoding/json"
"fmt"
"net"
"os"
"path/filepath"
"time"
cmtconfig "github.com/cometbft/cometbft/config"
cmttime "github.com/cometbft/cometbft/types/time"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"cosmossdk.io/math"
"cosmossdk.io/math/unsafe"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/cosmos/cosmos-sdk/client/tx"
"github.com/cosmos/cosmos-sdk/crypto/hd"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
"github.com/cosmos/cosmos-sdk/runtime"
"github.com/cosmos/cosmos-sdk/server"
srvconfig "github.com/cosmos/cosmos-sdk/server/config"
"github.com/cosmos/cosmos-sdk/testutil"
"github.com/cosmos/cosmos-sdk/testutil/network"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/module"
"github.com/cosmos/cosmos-sdk/version"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
"github.com/cosmos/cosmos-sdk/x/genutil"
genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types"
stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
"github.com/onsonr/hway/app"
)
// Flag names shared by the testnet subcommands.
var (
	flagNodeDirPrefix     = "node-dir-prefix"
	flagNumValidators     = "v"
	flagOutputDir         = "output-dir"
	flagNodeDaemonHome    = "node-daemon-home"
	flagStartingIPAddress = "starting-ip-address"
	flagEnableLogging     = "enable-logging"
	flagGRPCAddress       = "grpc.address"
	flagRPCAddress        = "rpc.address"
	flagAPIAddress        = "api.address"
	flagPrintMnemonic     = "print-mnemonic"
	// custom flags
	flagCommitTimeout = "commit-timeout"
	flagSingleHost    = "single-host"
)
// initArgs holds the parsed flag values for the `testnet init-files`
// subcommand, which lays out per-validator config directories on disk.
type initArgs struct {
	algo              string // key signing algorithm
	chainID           string // chain ID; generated randomly when empty
	keyringBackend    string // keyring backend (os|file|test)
	minGasPrices      string // minimum gas prices written into each app.toml
	nodeDaemonHome    string // daemon home directory name inside each node dir
	nodeDirPrefix     string // node directory name prefix (node0, node1, ...)
	numValidators     int    // number of validator nodes to generate
	outputDir         string // root directory for all generated node dirs
	startingIPAddress string // first node's IP; later nodes increment it
	singleMachine     bool   // all nodes share one host (ports offset per node)
}
// startArgs holds the parsed flag values for the `testnet start`
// subcommand, which runs an in-process multi-validator network.
type startArgs struct {
	algo          string        // key signing algorithm
	apiAddress    string        // REST API listen address
	chainID       string        // chain ID; random when empty
	enableLogging bool          // enable INFO logging of CometBFT validator nodes
	grpcAddress   string        // gRPC server listen address
	minGasPrices  string        // minimum gas prices
	numValidators int           // number of in-process validators
	outputDir     string        // base directory for network state
	printMnemonic bool          // print the first validator's mnemonic to stdout
	timeoutCommit time.Duration // consensus commit timeout handed to the network config
}
// addTestnetFlagsToCmd registers the flags shared by `testnet start` and
// `testnet init-files`: validator count, output directory, chain ID,
// minimum gas prices and the key signing algorithm.
func addTestnetFlagsToCmd(cmd *cobra.Command) {
	cmd.Flags().Int(flagNumValidators, 4, "Number of validators to initialize the testnet with")
	cmd.Flags().StringP(flagOutputDir, "o", "./.testnets", "Directory to store initialization data for the testnet")
	cmd.Flags().String(flags.FlagChainID, "", "genesis file chain-id, if left blank will be randomly created")
	cmd.Flags().String(server.FlagMinGasPrices, fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom), "Minimum gas prices to accept for transactions; All fees in a tx must meet this minimum (e.g. 0.01photino,0.001stake)")
	cmd.Flags().String(flags.FlagKeyType, string(hd.Secp256k1Type), "Key signing algorithm to generate keys for")

	// support old flags name for backwards compatibility
	// (the deprecated --algo spelling is rewritten to --key-type)
	cmd.Flags().SetNormalizeFunc(func(f *pflag.FlagSet, name string) pflag.NormalizedName {
		if name == flags.FlagKeyAlgorithm {
			name = flags.FlagKeyType
		}
		return pflag.NormalizedName(name)
	})
}
// NewTestnetCmd creates a root testnet command with subcommands to run an in-process testnet or initialize
// validator configuration files for running a multi-validator testnet in a separate process.
// mbm and genBalIterator are forwarded to the init-files subcommand for genesis construction.
func NewTestnetCmd(mbm module.BasicManager, genBalIterator banktypes.GenesisBalancesIterator) *cobra.Command {
	testnetCmd := &cobra.Command{
		Use:                        "testnet",
		Short:                      "subcommands for starting or configuring local testnets",
		DisableFlagParsing:         true,
		SuggestionsMinimumDistance: 2,
		RunE:                       client.ValidateCmd,
	}

	testnetCmd.AddCommand(testnetStartCmd())
	testnetCmd.AddCommand(testnetInitFilesCmd(mbm, genBalIterator))

	return testnetCmd
}
// testnetInitFilesCmd returns a cmd to initialize all files for CometBFT testnet and application
func testnetInitFilesCmd(mbm module.BasicManager, genBalIterator banktypes.GenesisBalancesIterator) *cobra.Command {
	cmd := &cobra.Command{
		Use:   "init-files",
		Short: "Initialize config directories & files for a multi-validator testnet running locally via separate processes (e.g. Docker Compose or similar)",
		Long: fmt.Sprintf(`init-files will setup "v" number of directories and populate each with
necessary files (private validator, genesis, config, etc.) for running "v" validator nodes.
Booting up a network with these validator folders is intended to be used with Docker Compose,
or a similar setup where each node has a manually configurable IP address.
Note, strict routability for addresses is turned off in the config file.
Example:
%s testnet init-files --v 4 --output-dir ./.testnets --starting-ip-address 192.168.10.2
`, version.AppName),
		RunE: func(cmd *cobra.Command, _ []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			serverCtx := server.GetServerContextFromCmd(cmd)
			config := serverCtx.Config

			// Collect flag values into initArgs; lookup errors are ignored
			// because every flag is registered below with a default.
			args := initArgs{}
			args.outputDir, _ = cmd.Flags().GetString(flagOutputDir)
			args.keyringBackend, _ = cmd.Flags().GetString(flags.FlagKeyringBackend)
			args.chainID, _ = cmd.Flags().GetString(flags.FlagChainID)
			args.minGasPrices, _ = cmd.Flags().GetString(server.FlagMinGasPrices)
			args.nodeDirPrefix, _ = cmd.Flags().GetString(flagNodeDirPrefix)
			args.nodeDaemonHome, _ = cmd.Flags().GetString(flagNodeDaemonHome)
			args.startingIPAddress, _ = cmd.Flags().GetString(flagStartingIPAddress)
			args.numValidators, _ = cmd.Flags().GetInt(flagNumValidators)
			args.algo, _ = cmd.Flags().GetString(flags.FlagKeyType)
			args.singleMachine, _ = cmd.Flags().GetBool(flagSingleHost)
			// The commit timeout goes straight into the CometBFT consensus config.
			config.Consensus.TimeoutCommit, err = cmd.Flags().GetDuration(flagCommitTimeout)
			if err != nil {
				return err
			}

			return initTestnetFiles(clientCtx, cmd, config, mbm, genBalIterator, clientCtx.TxConfig.SigningContext().ValidatorAddressCodec(), args)
		},
	}

	addTestnetFlagsToCmd(cmd)
	cmd.Flags().String(flagNodeDirPrefix, "node", "Prefix the directory name for each node with (node results in node0, node1, ...)")
	cmd.Flags().String(flagNodeDaemonHome, version.AppName, "Home directory of the node's daemon configuration")
	cmd.Flags().String(flagStartingIPAddress, "192.168.0.1", "Starting IP address (192.168.0.1 results in persistent peers list ID0@192.168.0.1:46656, ID1@192.168.0.2:46656, ...)")
	cmd.Flags().String(flags.FlagKeyringBackend, flags.DefaultKeyringBackend, "Select keyring's backend (os|file|test)")
	cmd.Flags().Duration(flagCommitTimeout, 5*time.Second, "Time to wait after a block commit before starting on the new height")
	cmd.Flags().Bool(flagSingleHost, false, "Cluster runs on a single host machine with different ports")

	return cmd
}
// testnetStartCmd returns a cmd to start multi validator in-process testnet.
//
// Fix: startArgs.timeoutCommit was never populated from any flag, so
// networkConfig.TimeoutCommit in startTestnet was always overwritten with
// zero. The commit-timeout flag is now registered here (default 5s, matching
// init-files) and read into args.
func testnetStartCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "start",
		Short: "Launch an in-process multi-validator testnet",
		Long: fmt.Sprintf(`testnet will launch an in-process multi-validator testnet,
and generate "v" directories, populated with necessary validator configuration files
(private validator, genesis, config, etc.).
Example:
%s testnet --v 4 --output-dir ./.testnets
`, version.AppName),
		RunE: func(cmd *cobra.Command, _ []string) error {
			// Collect flag values; lookup errors are ignored because every
			// flag is registered below with a default.
			args := startArgs{}
			args.outputDir, _ = cmd.Flags().GetString(flagOutputDir)
			args.chainID, _ = cmd.Flags().GetString(flags.FlagChainID)
			args.minGasPrices, _ = cmd.Flags().GetString(server.FlagMinGasPrices)
			args.numValidators, _ = cmd.Flags().GetInt(flagNumValidators)
			args.algo, _ = cmd.Flags().GetString(flags.FlagKeyType)
			args.enableLogging, _ = cmd.Flags().GetBool(flagEnableLogging)
			args.rpcAddress, _ = cmd.Flags().GetString(flagRPCAddress)
			args.apiAddress, _ = cmd.Flags().GetString(flagAPIAddress)
			args.grpcAddress, _ = cmd.Flags().GetString(flagGRPCAddress)
			args.printMnemonic, _ = cmd.Flags().GetBool(flagPrintMnemonic)
			// Previously missing: without this read, timeoutCommit stayed at
			// its zero value.
			args.timeoutCommit, _ = cmd.Flags().GetDuration(flagCommitTimeout)
			return startTestnet(cmd, args)
		},
	}

	addTestnetFlagsToCmd(cmd)
	cmd.Flags().Bool(flagEnableLogging, false, "Enable INFO logging of CometBFT validator nodes")
	cmd.Flags().String(flagRPCAddress, "tcp://0.0.0.0:26657", "the RPC address to listen on")
	cmd.Flags().String(flagAPIAddress, "tcp://0.0.0.0:1317", "the address to listen on for REST API")
	cmd.Flags().String(flagGRPCAddress, "0.0.0.0:9090", "the gRPC server address to listen on")
	cmd.Flags().Bool(flagPrintMnemonic, true, "print mnemonic of first validator to stdout for manual testing")
	cmd.Flags().Duration(flagCommitTimeout, 5*time.Second, "Time to wait after a block commit before starting on the new height")
	return cmd
}
// nodeDirPerm is the permission mode used when creating per-node config directories.
const nodeDirPerm = 0o755
// initTestnetFiles initializes testnet files for a testnet to be run in a separate process.
//
// For each of args.numValidators it creates a node home directory under
// args.outputDir, writes CometBFT/app configs, generates the node validator
// files, saves the account mnemonic to key_seed.json, funds the account in
// genesis and writes a signed create-validator gentx. It then builds the
// shared genesis via initGenFiles and collectGenFiles.
func initTestnetFiles(
	clientCtx client.Context,
	cmd *cobra.Command,
	nodeConfig *cmtconfig.Config,
	mbm module.BasicManager,
	genBalIterator banktypes.GenesisBalancesIterator,
	valAddrCodec runtime.ValidatorAddressCodec,
	args initArgs,
) error {
	// Generate a random chain ID when the caller did not supply one.
	if args.chainID == "" {
		args.chainID = "chain-" + unsafe.Str(6)
	}
	nodeIDs := make([]string, args.numValidators)
	valPubKeys := make([]cryptotypes.PubKey, args.numValidators)

	// Shared app.toml settings written into every node directory.
	appConfig := srvconfig.DefaultConfig()
	appConfig.MinGasPrices = args.minGasPrices
	appConfig.API.Enable = true
	appConfig.Telemetry.Enabled = true
	appConfig.Telemetry.PrometheusRetentionTime = 60
	appConfig.Telemetry.EnableHostnameLabel = false
	appConfig.Telemetry.GlobalLabels = [][]string{{"chain_id", args.chainID}}

	var (
		genAccounts []authtypes.GenesisAccount
		genBalances []banktypes.Balance
		genFiles    []string
	)
	const (
		rpcPort  = 26657
		apiPort  = 1317
		grpcPort = 9090
	)
	p2pPortStart := 26656

	inBuf := bufio.NewReader(cmd.InOrStdin())
	// generate private keys, node IDs, and initial transactions
	for i := 0; i < args.numValidators; i++ {
		var portOffset int
		if args.singleMachine {
			// All nodes share one host: offset ports by node index and relax
			// P2P checks that assume distinct IPs.
			portOffset = i
			p2pPortStart = 16656 // use different start point to not conflict with rpc port
			nodeConfig.P2P.AddrBookStrict = false
			nodeConfig.P2P.PexReactor = false
			nodeConfig.P2P.AllowDuplicateIP = true
		}

		nodeDirName := fmt.Sprintf("%s%d", args.nodeDirPrefix, i)
		nodeDir := filepath.Join(args.outputDir, nodeDirName, args.nodeDaemonHome)
		gentxsDir := filepath.Join(args.outputDir, "gentxs")

		nodeConfig.SetRoot(nodeDir)
		nodeConfig.Moniker = nodeDirName
		// NOTE(review): no portOffset applied here; collectGenFiles rewrites
		// the RPC listen address per node later — confirm intended.
		nodeConfig.RPC.ListenAddress = "tcp://0.0.0.0:26657"

		appConfig.API.Address = fmt.Sprintf("tcp://0.0.0.0:%d", apiPort+portOffset)
		appConfig.GRPC.Address = fmt.Sprintf("0.0.0.0:%d", grpcPort+portOffset)
		appConfig.GRPCWeb.Enable = true

		// Failures while laying out a node directory remove the whole output
		// dir so a rerun starts clean.
		if err := os.MkdirAll(filepath.Join(nodeDir, "config"), nodeDirPerm); err != nil {
			_ = os.RemoveAll(args.outputDir)
			return err
		}

		ip, err := getIP(i, args.startingIPAddress)
		if err != nil {
			_ = os.RemoveAll(args.outputDir)
			return err
		}

		nodeIDs[i], valPubKeys[i], err = genutil.InitializeNodeValidatorFiles(nodeConfig)
		if err != nil {
			_ = os.RemoveAll(args.outputDir)
			return err
		}

		// Persistent-peer entry for this node (id@ip:port), carried in the
		// gentx memo.
		memo := fmt.Sprintf("%s@%s:%d", nodeIDs[i], ip, p2pPortStart+portOffset)
		genFiles = append(genFiles, nodeConfig.GenesisFile())

		kb, err := keyring.New(sdk.KeyringServiceName(), args.keyringBackend, nodeDir, inBuf, clientCtx.Codec)
		if err != nil {
			return err
		}

		keyringAlgos, _ := kb.SupportedAlgorithms()
		algo, err := keyring.NewSigningAlgoFromString(args.algo, keyringAlgos)
		if err != nil {
			return err
		}

		addr, secret, err := testutil.GenerateSaveCoinKey(kb, nodeDirName, "", true, algo)
		if err != nil {
			_ = os.RemoveAll(args.outputDir)
			return err
		}

		info := map[string]string{"secret": secret}

		cliPrint, err := json.Marshal(info)
		if err != nil {
			return err
		}

		// save private key seed words
		if err := writeFile(fmt.Sprintf("%v.json", "key_seed"), nodeDir, cliPrint); err != nil {
			return err
		}

		// Fund the validator's account in genesis: test tokens plus stake to
		// bond from.
		accTokens := sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction)
		accStakingTokens := sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction)
		coins := sdk.Coins{
			sdk.NewCoin("testtoken", accTokens),
			sdk.NewCoin(sdk.DefaultBondDenom, accStakingTokens),
		}

		genBalances = append(genBalances, banktypes.Balance{Address: addr.String(), Coins: coins.Sort()})
		genAccounts = append(genAccounts, authtypes.NewBaseAccount(addr, nil, 0, 0))

		valStr, err := valAddrCodec.BytesToString(sdk.ValAddress(addr))
		if err != nil {
			return err
		}

		valTokens := sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction)
		createValMsg, err := stakingtypes.NewMsgCreateValidator(
			valStr,
			valPubKeys[i],
			sdk.NewCoin(sdk.DefaultBondDenom, valTokens),
			stakingtypes.NewDescription(nodeDirName, "", "", "", ""),
			stakingtypes.NewCommissionRates(math.LegacyOneDec(), math.LegacyOneDec(), math.LegacyOneDec()),
			math.OneInt(),
		)
		if err != nil {
			return err
		}

		// Build and sign the gentx offline.
		txBuilder := clientCtx.TxConfig.NewTxBuilder()
		if err := txBuilder.SetMsgs(createValMsg); err != nil {
			return err
		}

		txBuilder.SetMemo(memo)

		txFactory := tx.Factory{}
		txFactory = txFactory.
			WithChainID(args.chainID).
			WithMemo(memo).
			WithKeybase(kb).
			WithTxConfig(clientCtx.TxConfig)

		if err := tx.Sign(cmd.Context(), txFactory, nodeDirName, txBuilder, true); err != nil {
			return err
		}

		txBz, err := clientCtx.TxConfig.TxJSONEncoder()(txBuilder.GetTx())
		if err != nil {
			return err
		}

		if err := writeFile(fmt.Sprintf("%v.json", nodeDirName), gentxsDir, txBz); err != nil {
			return err
		}

		srvconfig.SetConfigTemplate(srvconfig.DefaultConfigTemplate)
		srvconfig.WriteConfigFile(filepath.Join(nodeDir, "config", "app.toml"), appConfig)
	}

	if err := initGenFiles(clientCtx, mbm, args.chainID, genAccounts, genBalances, genFiles, args.numValidators); err != nil {
		return err
	}

	err := collectGenFiles(
		clientCtx, nodeConfig, args.chainID, nodeIDs, valPubKeys, args.numValidators,
		args.outputDir, args.nodeDirPrefix, args.nodeDaemonHome, genBalIterator, valAddrCodec,
		rpcPort, p2pPortStart, args.singleMachine,
	)
	if err != nil {
		return err
	}

	cmd.PrintErrf("Successfully initialized %d node directories\n", args.numValidators)
	return nil
}
// initGenFiles builds the shared genesis app state — packing the generated
// accounts into x/auth state and the balances/supply into x/bank state — and
// saves an identical genesis file for each of the numValidators nodes.
func initGenFiles(
	clientCtx client.Context, mbm module.BasicManager, chainID string,
	genAccounts []authtypes.GenesisAccount, genBalances []banktypes.Balance,
	genFiles []string, numValidators int,
) error {
	appGenState := mbm.DefaultGenesis(clientCtx.Codec)

	// set the accounts in the genesis state
	var authGenState authtypes.GenesisState
	clientCtx.Codec.MustUnmarshalJSON(appGenState[authtypes.ModuleName], &authGenState)

	accounts, err := authtypes.PackAccounts(genAccounts)
	if err != nil {
		return err
	}

	authGenState.Accounts = accounts
	appGenState[authtypes.ModuleName] = clientCtx.Codec.MustMarshalJSON(&authGenState)

	// set the balances in the genesis state
	var bankGenState banktypes.GenesisState
	clientCtx.Codec.MustUnmarshalJSON(appGenState[banktypes.ModuleName], &bankGenState)

	bankGenState.Balances = banktypes.SanitizeGenesisBalances(genBalances)
	// Total supply is the sum of all genesis balances.
	for _, bal := range bankGenState.Balances {
		bankGenState.Supply = bankGenState.Supply.Add(bal.Coins...)
	}
	appGenState[banktypes.ModuleName] = clientCtx.Codec.MustMarshalJSON(&bankGenState)

	appGenStateJSON, err := json.MarshalIndent(appGenState, "", " ")
	if err != nil {
		return err
	}

	appGenesis := genutiltypes.NewAppGenesisWithVersion(chainID, appGenStateJSON)

	// generate empty genesis files for each validator and save
	for i := 0; i < numValidators; i++ {
		if err := appGenesis.SaveAs(genFiles[i]); err != nil {
			return err
		}
	}
	return nil
}
// collectGenFiles folds the gentxs produced by initTestnetFiles into the
// application genesis state, keeps the first node's result as the canonical
// app state, and rewrites every validator's genesis file with that state and
// a single shared genesis time.
func collectGenFiles(
	clientCtx client.Context, nodeConfig *cmtconfig.Config, chainID string,
	nodeIDs []string, valPubKeys []cryptotypes.PubKey, numValidators int,
	outputDir, nodeDirPrefix, nodeDaemonHome string, genBalIterator banktypes.GenesisBalancesIterator, valAddrCodec runtime.ValidatorAddressCodec,
	rpcPortStart, p2pPortStart int,
	singleMachine bool,
) error {
	var appState json.RawMessage
	genTime := cmttime.Now()

	for i := 0; i < numValidators; i++ {
		var portOffset int
		if singleMachine {
			// Single-host mode: give each node distinct ports.
			portOffset = i
		}

		nodeDirName := fmt.Sprintf("%s%d", nodeDirPrefix, i)
		nodeDir := filepath.Join(outputDir, nodeDirName, nodeDaemonHome)
		gentxsDir := filepath.Join(outputDir, "gentxs")
		nodeConfig.Moniker = nodeDirName
		nodeConfig.RPC.ListenAddress = fmt.Sprintf("tcp://0.0.0.0:%d", rpcPortStart+portOffset)
		nodeConfig.P2P.ListenAddress = fmt.Sprintf("tcp://0.0.0.0:%d", p2pPortStart+portOffset)

		nodeConfig.SetRoot(nodeDir)

		nodeID, valPubKey := nodeIDs[i], valPubKeys[i]
		initCfg := genutiltypes.NewInitConfig(chainID, gentxsDir, nodeID, valPubKey)

		appGenesis, err := genutiltypes.AppGenesisFromFile(nodeConfig.GenesisFile())
		if err != nil {
			return err
		}

		nodeAppState, err := genutil.GenAppStateFromConfig(clientCtx.Codec, clientCtx.TxConfig, nodeConfig, initCfg, appGenesis, genBalIterator, genutiltypes.DefaultMessageValidator,
			valAddrCodec)
		if err != nil {
			return err
		}

		if appState == nil {
			// set the canonical application state (they should not differ)
			appState = nodeAppState
		}

		genFile := nodeConfig.GenesisFile()

		// overwrite each validator's genesis file to have a canonical genesis time
		if err := genutil.ExportGenesisFileWithTime(genFile, chainID, nil, appState, genTime); err != nil {
			return err
		}
	}

	return nil
}
func getIP(i int, startingIPAddr string) (ip string, err error) {
if len(startingIPAddr) == 0 {
ip, err = server.ExternalIP()
if err != nil {
return "", err
}
return ip, nil
}
return calculateIP(startingIPAddr, i)
}
// calculateIP returns the IPv4 address obtained by adding offset i to ip.
//
// Fix: the previous implementation incremented only the last octet, silently
// wrapping past .255 and handing out duplicate addresses once more than 255
// nodes were requested. The address is now treated as a 32-bit big-endian
// integer so the addition carries across octet boundaries, and overflow of
// the IPv4 space (or a negative offset) is reported as an error.
func calculateIP(ip string, i int) (string, error) {
	ipv4 := net.ParseIP(ip).To4()
	if ipv4 == nil {
		return "", fmt.Errorf("%v: non ipv4 address", ip)
	}
	if i < 0 {
		return "", fmt.Errorf("%v: negative address offset %d", ip, i)
	}
	// Widen to uint64 so the overflow check below cannot itself wrap.
	v := uint64(ipv4[0])<<24 | uint64(ipv4[1])<<16 | uint64(ipv4[2])<<8 | uint64(ipv4[3])
	v += uint64(i)
	if v > 0xFFFFFFFF {
		return "", fmt.Errorf("%v: IPv4 address space exhausted adding offset %d", ip, i)
	}
	return net.IPv4(byte(v>>24), byte(v>>16), byte(v>>8), byte(v)).String(), nil
}
func writeFile(name, dir string, contents []byte) error {
file := filepath.Join(dir, name)
if err := os.MkdirAll(dir, 0o755); err != nil {
return fmt.Errorf("could not create directory %q: %w", dir, err)
}
if err := os.WriteFile(file, contents, 0o600); err != nil {
return err
}
return nil
}
// startTestnet starts an in-process testnet.
//
// It maps startArgs onto a network.Config, starts the network under
// outputDir/<chain-id>, waits for the first block, then blocks until the
// user presses Enter before cleaning the network up.
//
// Fix: the "directory already exists" error message said "testnests".
func startTestnet(cmd *cobra.Command, args startArgs) error {
	networkConfig := network.DefaultConfig(app.NewTestNetworkFixture)

	// Default networkConfig.ChainID is random, and we should only override it if chainID provided
	// is non-empty
	if args.chainID != "" {
		networkConfig.ChainID = args.chainID
	}
	networkConfig.SigningAlgo = args.algo
	networkConfig.MinGasPrices = args.minGasPrices
	networkConfig.NumValidators = args.numValidators
	networkConfig.EnableLogging = args.enableLogging
	networkConfig.RPCAddress = args.rpcAddress
	networkConfig.APIAddress = args.apiAddress
	networkConfig.GRPCAddress = args.grpcAddress
	networkConfig.PrintMnemonic = args.printMnemonic
	networkConfig.TimeoutCommit = args.timeoutCommit
	networkLogger := network.NewCLILogger(cmd)

	baseDir := fmt.Sprintf("%s/%s", args.outputDir, networkConfig.ChainID)
	if _, err := os.Stat(baseDir); !os.IsNotExist(err) {
		return fmt.Errorf(
			"testnets directory already exists for chain-id '%s': %s, please remove or select a new --chain-id",
			networkConfig.ChainID, baseDir)
	}

	testnet, err := network.New(networkLogger, baseDir, networkConfig)
	if err != nil {
		return err
	}

	if _, err := testnet.WaitForHeight(1); err != nil {
		return err
	}
	cmd.Println("press the Enter Key to terminate")
	if _, err := fmt.Scanln(); err != nil { // wait for Enter Key
		return err
	}
	testnet.Cleanup()

	return nil
}

24
cmd/vltd/main.go Normal file
View File

@ -0,0 +1,24 @@
package main
import (
"github.com/onsonr/hway/pkg/vault"
"github.com/spf13/cobra"
)
// main is the vltd entry point: it builds the serve command, runs it, and
// panics on any execution error.
func main() {
	root := serveCmd()
	if err := root.Execute(); err != nil {
		panic(err)
	}
}
// serveCmd builds the root `vltd` command (aliased `vault`), which runs the
// vault REST API and HTMX frontend via vault.Serve.
func serveCmd() *cobra.Command {
	return &cobra.Command{
		Use:                "vltd",
		Aliases:            []string{"vault"},
		Short:              "run the vault rest api and htmx frontend",
		DisableFlagParsing: false,
		Run: func(cmd *cobra.Command, args []string) {
			// NOTE(review): vault.Serve returns nothing here, so any serve
			// failure is invisible to the caller — confirm Serve reports its
			// own errors.
			vault.Serve(cmd.Context())
		},
	}
}

63
configs/logs.json Normal file
View File

@ -0,0 +1,63 @@
{
"start_time": 1715665827,
"chains": [
{
"chain_id": "chainid-1",
"chain_name": "chainid-1",
"rpc_address": "http://0.0.0.0:26657",
"rest_address": "http://0.0.0.0:1317",
"grpc_address": "0.0.0.0:9090",
"p2p_address": "0.0.0.0:52887",
"ibc_paths": [
"ibc-connection-1"
]
},
{
"chain_id": "localcosmos-1",
"chain_name": "localcosmos-1",
"rpc_address": "http://0.0.0.0:52884",
"rest_address": "http://0.0.0.0:52881",
"grpc_address": "0.0.0.0:52885",
"p2p_address": "0.0.0.0:52883",
"ibc_paths": [
"ibc-connection-1"
]
}
],
"ibc_channels": [
{
"chain_id": "chainid-1",
"channel": {
"state": "STATE_OPEN",
"ordering": "ORDER_UNORDERED",
"counterparty": {
"port_id": "transfer",
"channel_id": "channel-0"
},
"connection_hops": [
"connection-0"
],
"version": "ics20-1",
"port_id": "transfer",
"channel_id": "channel-0"
}
},
{
"chain_id": "localcosmos-1",
"channel": {
"state": "STATE_OPEN",
"ordering": "ORDER_UNORDERED",
"counterparty": {
"port_id": "transfer",
"channel_id": "channel-0"
},
"connection_hops": [
"connection-0"
],
"version": "ics20-1",
"port_id": "transfer",
"channel_id": "channel-0"
}
}
]
}

85
contrib/devtools/Makefile Normal file
View File

@ -0,0 +1,85 @@
###
# Find OS and Go environment
# GO contains the Go binary
# FS contains the OS file separator
###
# Detect the host platform: locate the go binary with the platform-appropriate
# lookup command and pick the matching path separator for $(FS).
ifeq ($(OS),Windows_NT)
  GO := $(shell where go.exe 2> NUL)
  FS := "\\"
else
  GO := $(shell command -v go 2> /dev/null)
  FS := "/"
endif

# Fail fast when no Go toolchain is on PATH.
ifeq ($(GO),)
  $(error could not find go. Is it in PATH? $(GO))
endif
###############################################################################
### Functions ###
###############################################################################
# go_get(org,repo,ref): ensure github.com/$(1)/$(2) is cloned under
# $(GITHUBDIR), then fetch and check out ref $(3). The first $(if) branch is
# the Windows (cmd.exe) variant, the second the POSIX shell variant.
go_get = $(if $(findstring Windows_NT,$(OS)),\
IF NOT EXIST $(GITHUBDIR)$(FS)$(1)$(FS) ( mkdir $(GITHUBDIR)$(FS)$(1) ) else (cd .) &\
IF NOT EXIST $(GITHUBDIR)$(FS)$(1)$(FS)$(2)$(FS) ( cd $(GITHUBDIR)$(FS)$(1) && git clone https://github.com/$(1)/$(2) ) else (cd .) &\
,\
mkdir -p $(GITHUBDIR)$(FS)$(1) &&\
(test ! -d $(GITHUBDIR)$(FS)$(1)$(FS)$(2) && cd $(GITHUBDIR)$(FS)$(1) && git clone https://github.com/$(1)/$(2)) || true &&\
)\
cd $(GITHUBDIR)$(FS)$(1)$(FS)$(2) && git fetch origin && git checkout -q $(3)

# Absolute path of this Makefile and the directory containing it.
mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST)))
mkfile_dir := $(shell cd $(shell dirname $(mkfile_path)); pwd)
###############################################################################
### Tools ###
###############################################################################
# Install locations and host metadata (all overridable from the environment).
PREFIX ?= /usr/local
BIN ?= $(PREFIX)/bin
UNAME_S ?= $(shell uname -s)
UNAME_M ?= $(shell uname -m)
GOPATH ?= $(shell $(GO) env GOPATH)
GITHUBDIR := $(GOPATH)$(FS)src$(FS)github.com
BUF_VERSION ?= 0.11.0

# Dev-tool binaries are installed into the user's GOPATH bin.
TOOLS_DESTDIR ?= $(GOPATH)/bin
STATIK = $(TOOLS_DESTDIR)/statik
RUNSIM = $(TOOLS_DESTDIR)/runsim
GOLANGCI_LINT = $(TOOLS_DESTDIR)/golangci-lint
# tools installs all dev tools; tools-stamp is a real marker file so repeated
# invocations are cheap.
tools: tools-stamp
tools-stamp: statik runsim golangci-lint
# Create dummy file to satisfy dependency and avoid
# rebuilding when this Makefile target is hit twice
# in a row.
	touch $@

statik: $(STATIK)
$(STATIK):
	@echo "Installing statik..."
	@(cd /tmp && go install github.com/rakyll/statik@v0.1.6)

# Install the runsim binary with a temporary workaround of entering an outside
# directory as the "go get" command ignores the -mod option and will pollute the
# go.{mod, sum} files.
#
# ref: https://github.com/golang/go/issues/30515
runsim: $(RUNSIM)
$(RUNSIM):
	@echo "Installing runsim..."
	@(cd /tmp && go install github.com/cosmos/tools/cmd/runsim@v1.0.0)

golangci-lint: $(GOLANGCI_LINT)
$(GOLANGCI_LINT):
	@echo "Installing golangci-lint..."
	@(cd /tmp && go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.47.0)

tools-clean:
	rm -f $(STATIK) $(GOLANGCI_LINT) $(RUNSIM)
	rm -f tools-stamp

# Fix: tools and golangci-lint were missing from .PHONY even though both are
# phony targets (tools-stamp is intentionally absent — it is a real file).
.PHONY: tools tools-clean statik runsim golangci-lint

View File

@ -0,0 +1,6 @@
## Contributors
Thanks to the entire Cosmos SDK team and the contributors who put their efforts into making simulation testing
easier to implement. 🤗
https://github.com/cosmos/cosmos-sdk/blob/master/contrib/devtools/Makefile

171
crypto/accumulator/accumulator.go Executable file
View File

@ -0,0 +1,171 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package accumulator implements the cryptographic accumulator as described in https://eprint.iacr.org/2020/777.pdf
// It also implements the zero knowledge proof of knowledge protocol
// described in section 7 of the paper.
// Note: the paper only describes for non-membership witness case, but we don't
// use non-membership witness. We only implement the membership witness case.
package accumulator
import (
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/hway/crypto/core/curves"
)
// structMarshal is the BARE wire format shared by the accumulator types:
// the curve name plus the compressed point/scalar bytes.
type structMarshal struct {
	Curve string `bare:"curve"`
	Value []byte `bare:"value"`
}

// Element is a scalar value that is accumulated into (or removed from) an
// Accumulator.
type Element curves.Scalar

// Coefficient is a point
type Coefficient curves.Point

// Accumulator is a point
type Accumulator struct {
	value curves.Point // current accumulator value V
}
// New creates a new accumulator.
func (acc *Accumulator) New(curve *curves.PairingCurve) (*Accumulator, error) {
	// If we need to support non-membership witness, we need to implement Accumulator Initialization
	// as described in section 6 of <https://eprint.iacr.org/2020/777.pdf>
	// for now we don't need non-membership witness
	// i.e., it computes V0 = prod(y + α) * P, y ∈ Y_V0, P is a generator of G1. Since we do not use non-membership witness
	// we just set the initial accumulator a G1 generator.
	acc.value = curve.Scalar.Point().Generator()
	return acc, nil
}
// WithElements initializes a new accumulator prefilled with entries.
// Each member is assumed to be hashed.
// V = prod(y + α) * V0, for all y ∈ Y_V
func (acc *Accumulator) WithElements(curve *curves.PairingCurve, key *SecretKey, m []Element) (*Accumulator, error) {
	if _, err := acc.New(curve); err != nil {
		return nil, err
	}
	product, err := key.BatchAdditions(m)
	if err != nil {
		return nil, err
	}
	acc.value = acc.value.Mul(product)
	return acc, nil
}
// AddElements accumulates a set of elements into the accumulator.
func (acc *Accumulator) AddElements(key *SecretKey, m []Element) (*Accumulator, error) {
	if acc.value == nil || key.value == nil {
		return nil, fmt.Errorf("accumulator and secret key should not be nil")
	}

	// BatchAdditions combines all elements into a single scalar, so one
	// point multiplication updates the accumulator.
	y, err := key.BatchAdditions(m)
	if err != nil {
		return nil, err
	}

	acc.value = acc.value.Mul(y)
	return acc, nil
}
// Add accumulates a single element into the accumulator.
// V' = (y + alpha) * V
func (acc *Accumulator) Add(key *SecretKey, e Element) (*Accumulator, error) {
	switch {
	case acc.value == nil, key.value == nil, e == nil:
		return nil, fmt.Errorf("accumulator, secret key and element should not be nil")
	case acc.value.IsIdentity():
		return nil, fmt.Errorf("accumulator, secret key and element should not be nil")
	}
	// Multiply by (y + alpha): y is the element, alpha the secret key.
	acc.value = acc.value.Mul(e.Add(key.value))
	return acc, nil
}
// Remove removes a single element from accumulator if it exists.
// V' = 1/(y+alpha) * V
func (acc *Accumulator) Remove(key *SecretKey, e Element) (*Accumulator, error) {
	if acc.value == nil || acc.value.IsIdentity() || key.value == nil || e == nil {
		return nil, fmt.Errorf("accumulator, secret key and element should not be nil")
	}

	y := e.Add(key.value) // y + alpha
	// Propagate any inversion failure to the caller.
	y, err := y.Invert() // 1/(y+alpha)
	if err != nil {
		return nil, err
	}

	acc.value = acc.value.Mul(y)
	return acc, nil
}
// Update performs a batch addition and deletion as described on page 7, section 3 in
// https://eprint.iacr.org/2020/777.pdf
// It returns the updated accumulator and one coefficient per polynomial
// term, each scaled by the pre-update accumulator value.
func (acc *Accumulator) Update(key *SecretKey, additions []Element, deletions []Element) (*Accumulator, []Coefficient, error) {
	if acc.value == nil || acc.value.IsIdentity() || key.value == nil {
		return nil, nil, fmt.Errorf("accumulator and secret key should not be nil")
	}

	// Compute dA(-alpha) = prod(y + alpha), y in the set of A ⊆ ACC-Y_V
	a, err := key.BatchAdditions(additions)
	if err != nil {
		return nil, nil, err
	}

	// Compute dD(-alpha) = 1/prod(y + alpha), y in the set of D ⊆ Y_V
	d, err := key.BatchDeletions(deletions)
	if err != nil {
		return nil, nil, err
	}

	// dA(-alpha)/dD(-alpha)
	div := a.Mul(d)
	newAcc := acc.value.Mul(div)

	// build an array of coefficients
	elements, err := key.CreateCoefficients(additions, deletions)
	if err != nil {
		return nil, nil, err
	}

	coefficients := make([]Coefficient, len(elements))
	for i := 0; i < len(elements); i++ {
		// Coefficients use the old accumulator value, so they are computed
		// before acc.value is replaced below.
		coefficients[i] = acc.value.Mul(elements[i])
	}
	acc.value = newAcc
	return acc, coefficients, nil
}
// MarshalBinary converts Accumulator to bytes.
func (acc Accumulator) MarshalBinary() ([]byte, error) {
	if acc.value == nil {
		return nil, fmt.Errorf("accumulator cannot be nil")
	}
	// Record the curve name next to the compressed point so UnmarshalBinary
	// can reconstruct the value on the right curve.
	wire := structMarshal{
		Curve: acc.value.CurveName(),
		Value: acc.value.ToAffineCompressed(),
	}
	return bare.Marshal(&wire)
}
// UnmarshalBinary sets Accumulator from bytes.
func (acc *Accumulator) UnmarshalBinary(data []byte) error {
	tv := new(structMarshal)
	err := bare.Unmarshal(data, tv)
	if err != nil {
		return err
	}

	// Resolve the curve recorded alongside the point bytes.
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}

	value, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Value)
	if err != nil {
		return err
	}

	acc.value = value
	return nil
}

View File

@ -0,0 +1,188 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"encoding/hex"
"fmt"
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestNewAccumulator100 verifies a fresh accumulator equals the G1 generator
// and that its BARE encoding is 60 bytes (120 hex characters).
// NOTE(review): the fmt.Println calls look like leftover debug output —
// consider removing them (together with the fmt import) in a cleanup pass.
func TestNewAccumulator100(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	accBz, err := acc.MarshalBinary()
	require.NoError(t, err)
	fmt.Println(accBz)
	fmt.Println(len(accBz))
	fmt.Println(hex.EncodeToString(accBz))
	fmt.Println(len(hex.EncodeToString(accBz)))
	require.Equal(t, 60, len(accBz), "Marshalled accumulator should be 60 bytes")
	require.Equal(t, 120, len(hex.EncodeToString(accBz)), "Hex-encoded accumulator should be 120 characters")
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestNewAccumulator10K checks that a freshly created accumulator equals the
// G1 generator.
func TestNewAccumulator10K(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestNewAccumulator10M is the long-running variant of the creation test and
// is skipped in -short mode.
func TestNewAccumulator10M(t *testing.T) {
	// Initiating 10M values takes time
	if testing.Short() {
		t.Skip("skipping test in short mode.")
	}
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestWithElements verifies that prefilling moves the accumulator away from
// both the identity and the generator, and that removing the same elements
// returns it to the generator.
func TestWithElements(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, _ := new(SecretKey).New(curve, seed[:])
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	elements := []Element{element1, element2}
	newAcc, err := new(Accumulator).WithElements(curve, key, elements)
	require.NoError(t, err)
	require.NotNil(t, newAcc)
	require.NotEqual(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Identity().ToAffineCompressed())
	require.NotEqual(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	_, _ = newAcc.Remove(key, element1)
	_, _ = newAcc.Remove(key, element2)
	require.Equal(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAdd verifies that adding one element changes the accumulator value
// away from the generator.
func TestAdd(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc := &Accumulator{curve.PointG1.Generator()}
	_, _ = acc.New(curve)
	// NOTE(review): this re-checks the err from key.New above — acc.New's
	// error is discarded. Confirm whether acc.New's error should be checked.
	require.NoError(t, err)
	require.NotNil(t, acc)
	element := curve.Scalar.Hash([]byte("value1"))
	require.NoError(t, err)
	require.NotNil(t, element)
	_, _ = acc.Add(key, element)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestRemove verifies that add followed by remove of the same element
// restores the accumulator to the generator.
func TestRemove(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	element := curve.Scalar.Hash([]byte("value1"))
	require.NoError(t, err)
	require.NotNil(t, element)
	// add element
	_, _ = acc.Add(key, element)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	// remove element
	acc, err = acc.Remove(key, element)
	require.NoError(t, err)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAddElements verifies that batch-adding several elements changes the
// accumulator value away from the generator.
func TestAddElements(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc := &Accumulator{curve.PointG1.Generator()}
	_, _ = acc.New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	element3 := curve.Scalar.Hash([]byte("value3"))
	elements := []Element{element1, element2, element3}
	acc, err = acc.AddElements(key, elements)
	require.NoError(t, err)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAccumulatorMarshal round-trips an accumulator through
// MarshalBinary/UnmarshalBinary and checks that marshalling an empty
// accumulator fails.
func TestAccumulatorMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	point := curve.PointG1.Generator().Mul(curve.Scalar.New(2))
	data, err := Accumulator{point}.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, data)
	// element cannot be empty
	_, err = Accumulator{}.MarshalBinary()
	require.Error(t, err)

	e := &Accumulator{curve.PointG1.Generator()}
	_ = e.UnmarshalBinary(data)
	require.True(t, e.value.Equal(point))
}
// TestUpdate verifies that a batch addition moves the accumulator away from
// the generator and a matching batch deletion brings it back.
func TestUpdate(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	key, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	require.NotNil(t, key)
	acc, err := new(Accumulator).New(curve)
	require.NoError(t, err)
	require.NotNil(t, acc)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	element3 := curve.Scalar.Hash([]byte("value3"))
	elements := []Element{element1, element2, element3}
	acc, _, err = acc.Update(key, elements, nil)
	require.NoError(t, err)
	require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
	acc, _, err = acc.Update(key, nil, elements)
	require.NoError(t, err)
	require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}

244
crypto/accumulator/key.go Executable file
View File

@ -0,0 +1,244 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/hway/crypto/core/curves"
)
// SecretKey is the secret alpha only held by the accumulator manager.
type SecretKey struct {
	value curves.Scalar // the secret scalar alpha
}
// New creates a new secret key from the seed by hashing it onto the scalar
// field of curve. The same seed always produces the same key.
func (sk *SecretKey) New(curve *curves.PairingCurve, seed []byte) (*SecretKey, error) {
	// Guard against a nil curve, which previously caused a panic on the
	// curve.Scalar dereference.
	if curve == nil {
		return nil, fmt.Errorf("curve cannot be nil")
	}
	sk.value = curve.Scalar.Hash(seed)
	return sk, nil
}
// GetPublicKey creates a public key from SecretKey sk.
// The public key is alpha times the generator of the other pairing group.
func (sk SecretKey) GetPublicKey(curve *curves.PairingCurve) (*PublicKey, error) {
	if curve == nil || sk.value == nil {
		return nil, fmt.Errorf("curve and sk value cannot be nil")
	}
	gen := curve.Scalar.Point().(curves.PairingPoint).OtherGroup().Generator()
	pt := gen.Mul(sk.value)
	return &PublicKey{pt.(curves.PairingPoint)}, nil
}
// MarshalBinary converts SecretKey to bytes.
// A zero-value key (nil scalar) cannot be encoded and returns an error.
func (sk SecretKey) MarshalBinary() ([]byte, error) {
	if sk.value == nil {
		return nil, fmt.Errorf("sk cannot be empty")
	}
	return bare.Marshal(&structMarshal{
		Value: sk.value.Bytes(),
		Curve: sk.value.Point().CurveName(),
	})
}
// UnmarshalBinary sets SecretKey from bytes.
// It decodes the wire struct, resolves the named curve, and parses the scalar.
func (sk *SecretKey) UnmarshalBinary(data []byte) error {
	var tv structMarshal
	if err := bare.Unmarshal(data, &tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	scalar, err := curve.NewScalar().SetBytes(tv.Value)
	if err != nil {
		return err
	}
	sk.value = scalar
	return nil
}
// BatchAdditions computes product(y + alpha) over every y in additions and
// returns the product. Any nil element is rejected with an error.
func (sk SecretKey) BatchAdditions(additions []Element) (Element, error) {
	if sk.value == nil {
		return nil, fmt.Errorf("secret key cannot be empty")
	}
	prod := sk.value.One()
	for _, y := range additions {
		if y == nil {
			return nil, fmt.Errorf("some element in additions is nil")
		}
		// Fold (y + alpha) into the running product.
		prod = prod.Mul(y.Add(sk.value))
	}
	return prod, nil
}
// BatchDeletions computes 1/product(y + alpha) over every y in deletions,
// i.e. the inverse of the corresponding batch addition product.
func (sk SecretKey) BatchDeletions(deletions []Element) (Element, error) {
	prod, err := sk.BatchAdditions(deletions)
	if err != nil {
		return nil, err
	}
	inv, err := prod.Invert()
	if err != nil {
		return nil, err
	}
	return inv, nil
}
// CreateCoefficients creates the Batch Polynomial coefficients.
// See page 7 of https://eprint.iacr.org/2020/777.pdf
//
// Given batch additions and deletions it returns the coefficients of
// v(x) = vA(x) - vD(x), the polynomial used for batch witness updates.
func (sk SecretKey) CreateCoefficients(additions []Element, deletions []Element) ([]Element, error) {
	if sk.value == nil {
		return nil, fmt.Errorf("secret key should not be nil")
	}
	// vD(x) = ∑^{m}_{s=1}{ ∏ 1..s {yD_i + alpha}^-1 ∏ 1 ..s-1 {yD_j - x}
	one := sk.value.One()
	m1 := one.Neg() // m1 is -1
	vD := make(polynomial, 0, len(deletions))
	for s := 0; s < len(deletions); s++ {
		// ∏ 1..s (yD_i + alpha)^-1
		c, err := sk.BatchDeletions(deletions[0 : s+1])
		if err != nil {
			return nil, fmt.Errorf("error in sk batchDeletions")
		}
		// Start from the constant polynomial 1.
		poly := make(polynomial, 1, s+2)
		poly[0] = one
		// ∏ 1..(s-1) (yD_j - x)
		for j := 0; j < s; j++ {
			t := make(polynomial, 2)
			// yD_j
			t[0] = deletions[j]
			// -x
			t[1] = m1
			// polynomial multiplication (yD_1-x) * (yD_2 - x) ...
			poly, err = poly.Mul(t)
			if err != nil {
				return nil, err
			}
		}
		// Scale this term by the batched-deletion inverse c.
		poly, err = poly.MulScalar(c)
		if err != nil {
			return nil, err
		}
		// Accumulate the term into vD.
		vD, err = vD.Add(poly)
		if err != nil {
			return nil, err
		}
	}
	// vD(x) * ∏ 1..n (yA_i + alpha)
	bAdd, err := sk.BatchAdditions(additions)
	if err != nil {
		return nil, fmt.Errorf("error in sk batchAdditions")
	}
	vD, err = vD.MulScalar(bAdd)
	if err != nil {
		return nil, err
	}
	// vA(x) = ∑^n_{s=1}{ ∏ 1..s-1 {yA_i + alpha} ∏ s+1..n {yA_j - x} }
	vA := make(polynomial, 0, len(additions))
	for s := 0; s < len(additions); s++ {
		// ∏ 1..s-1 {yA_i + alpha}
		var c Element
		if s == 0 {
			// The empty product is 1.
			c = one
		} else {
			c, err = sk.BatchAdditions(additions[0:s])
			if err != nil {
				return nil, err
			}
		}
		poly := make(polynomial, 1, s+2)
		poly[0] = one
		// ∏ s+1..n {yA_j - x}
		for j := s + 1; j < len(additions); j++ {
			t := make(polynomial, 2)
			t[0] = additions[j]
			t[1] = m1
			// polynomial multiplication (yA_1-x) * (yA_2 - x) ...
			poly, err = poly.Mul(t)
			if err != nil {
				return nil, err
			}
		}
		poly, err = poly.MulScalar(c)
		if err != nil {
			return nil, err
		}
		vA, err = vA.Add(poly)
		if err != nil {
			return nil, err
		}
	}
	// vA - vD
	vA, err = vA.Sub(vD)
	if err != nil {
		return nil, err
	}
	// Copy the internal polynomial into the exported Element slice type.
	result := make([]Element, len(vA))
	for i := 0; i < len(vA); i++ {
		result[i] = vA[i]
	}
	return result, nil
}
// PublicKey is the public key of accumulator, it should be sk * generator of G2
type PublicKey struct {
	value curves.PairingPoint // alpha * G2 generator
}
// MarshalBinary converts PublicKey to bytes.
// A zero-value key (nil point) cannot be encoded and returns an error.
func (pk PublicKey) MarshalBinary() ([]byte, error) {
	if pk.value == nil {
		return nil, fmt.Errorf("public key cannot be nil")
	}
	return bare.Marshal(&structMarshal{
		Value: pk.value.ToAffineCompressed(),
		Curve: pk.value.CurveName(),
	})
}
// UnmarshalBinary sets PublicKey from bytes.
// It resolves the named pairing curve, decompresses the point, and verifies
// that the decoded value is a pairing-capable point.
func (pk *PublicKey) UnmarshalBinary(data []byte) error {
	var tv structMarshal
	if err := bare.Unmarshal(data, &tv); err != nil {
		return err
	}
	curve := curves.GetPairingCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	pt, err := curve.NewScalar().Point().FromAffineCompressed(tv.Value)
	if err != nil {
		return err
	}
	pairing, ok := pt.(curves.PairingPoint)
	if !ok {
		return errors.New("can't convert to PairingPoint")
	}
	pk.value = pairing
	return nil
}

88
crypto/accumulator/key_test.go Executable file
View File

@ -0,0 +1,88 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestSecretKeyMarshal round-trips a secret key through its binary encoding
// and checks that a zero-value key cannot be encoded.
func TestSecretKeyMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	encoded, err := SecretKey{curve.Scalar.One()}.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, encoded)
	decoded := &SecretKey{curve.Scalar.New(2)}
	err = decoded.UnmarshalBinary(encoded)
	require.NoError(t, err)
	require.Equal(t, decoded.value.Bytes(), curve.Scalar.One().Bytes())
	// element cannot be empty
	_, err = SecretKey{}.MarshalBinary()
	require.Error(t, err)
}
// TestPublicKeyMarshal round-trips a public key through MarshalBinary /
// UnmarshalBinary (actually testing both toBytes() and from()).
func TestPublicKeyMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk := &SecretKey{curve.Scalar.New(3)}
	// BUG FIX: the GetPublicKey error was previously discarded; a failure
	// would have caused a nil-pointer panic on pk below.
	pk, err := sk.GetPublicKey(curve)
	require.NoError(t, err)
	pkBytes, err := pk.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, pkBytes)
	pk2 := &PublicKey{}
	err = pk2.UnmarshalBinary(pkBytes)
	require.NoError(t, err)
	require.True(t, pk.value.Equal(pk2.value))
}
// TestBatch checks that batched additions and deletions are multiplicative
// inverses: their product is 1 and applying both in the exponent returns the
// accumulator point to its starting generator.
func TestBatch(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	// BUG FIX: the error from SecretKey.New was previously discarded.
	sk, err := new(SecretKey).New(curve, seed[:])
	require.NoError(t, err)
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	elements := []Element{element1, element2}
	add, err := sk.BatchAdditions(elements)
	require.NoError(t, err)
	require.NotNil(t, add)
	del, err := sk.BatchDeletions(elements)
	require.NoError(t, err)
	require.NotNil(t, del)
	result := add.Mul(del)
	require.Equal(t, result, curve.Scalar.One())
	g1 := curve.PointG1.Generator()
	acc := g1.Mul(add)
	require.NotEqual(t, acc, g1)
	acc = acc.Mul(del)
	require.Equal(t, acc.ToAffineCompressed(), g1.ToAffineCompressed())
	acc2 := g1.Mul(result)
	require.True(t, acc2.Equal(g1))
}
// TestCoefficient verifies that CreateCoefficients with 2 additions and
// 3 deletions produces the expected 3 polynomial coefficients.
func TestCoefficient(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// BUG FIX: the error from SecretKey.New was previously discarded.
	sk, err := new(SecretKey).New(curve, []byte("1234567890"))
	require.NoError(t, err)
	element1 := curve.Scalar.Hash([]byte("value1"))
	element2 := curve.Scalar.Hash([]byte("value2"))
	element3 := curve.Scalar.Hash([]byte("value3"))
	element4 := curve.Scalar.Hash([]byte("value4"))
	element5 := curve.Scalar.Hash([]byte("value5"))
	elements := []Element{element1, element2, element3, element4, element5}
	coefficients, err := sk.CreateCoefficients(elements[0:2], elements[2:5])
	require.NoError(t, err)
	require.Equal(t, len(coefficients), 3)
}

204
crypto/accumulator/lib.go Executable file
View File

@ -0,0 +1,204 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"fmt"
"math"
"github.com/onsonr/hway/crypto/core/curves"
)
// dad evaluates prod(values[t] - y) over all t. It is used to construct
// dA(y) = prod(y_A,t - y) and dD(y) = prod(y_D,t - y).
func dad(values []Element, y Element) (Element, error) {
	if values == nil || y == nil {
		// BUG FIX: the message previously referenced a "curve" argument that
		// this function does not take.
		return nil, fmt.Errorf("values or y should not be nil")
	}
	// The single-element special case in the original was redundant: the
	// running product starts at 1, so one loop iteration yields a.Sub(y).
	result := y.One()
	for _, value := range values {
		if value == nil {
			return nil, fmt.Errorf("some element is nil")
		}
		result = result.Mul(value.Sub(y))
	}
	return result, nil
}
// polynomialPoint is a polynomial whose coefficients are curve points;
// index i holds the coefficient of x^i.
type polynomialPoint []curves.Point
// evaluate evaluates a PolynomialG1 on input x, computing
// p(x) = sum_i p[i] * x^i.
func (p polynomialPoint) evaluate(x curves.Scalar) (curves.Point, error) {
	// BUG FIX: a non-nil but empty polynomial previously passed the nil check
	// and panicked at p[0]; the length check covers both cases.
	if len(p) == 0 {
		return nil, fmt.Errorf("p cannot be empty")
	}
	for i := 0; i < len(p); i++ {
		if p[i] == nil {
			return nil, fmt.Errorf("some coefficient in p is nil")
		}
	}
	// Accumulate p[i] * x^i, tracking the running power of x in pp.
	pp := x
	res := p[0]
	for i := 1; i < len(p); i++ {
		res = res.Add(p[i].Mul(pp))
		pp = pp.Mul(x)
	}
	return res, nil
}
// Add returns the coefficient-wise sum of p and rhs; the result has the
// length of the longer operand, with the shorter treated as zero-padded.
func (p polynomialPoint) Add(rhs polynomialPoint) (polynomialPoint, error) {
	size := int(math.Max(float64(len(p)), float64(len(rhs))))
	sum := make(polynomialPoint, size)
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		// Adding the identity clones the point.
		sum[i] = coeff.Add(coeff.Identity())
	}
	for i, coeff := range rhs {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		if sum[i] == nil {
			sum[i] = coeff.Add(coeff.Identity())
		} else {
			sum[i] = sum[i].Add(coeff)
		}
	}
	return sum, nil
}
// Mul scales every coefficient of the point polynomial p by the scalar rhs
// and returns the scaled polynomial.
func (p polynomialPoint) Mul(rhs curves.Scalar) (polynomialPoint, error) {
	scaled := make(polynomialPoint, len(p))
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		scaled[i] = coeff.Mul(rhs)
	}
	return scaled, nil
}
// polynomial is a polynomial with scalar coefficients; index i holds the
// coefficient of x^i.
type polynomial []curves.Scalar
// Add returns the coefficient-wise sum p + rhs; the result has the length
// of the longer operand, with the shorter treated as zero-padded.
func (p polynomial) Add(rhs polynomial) (polynomial, error) {
	size := int(math.Max(float64(len(p)), float64(len(rhs))))
	sum := make([]curves.Scalar, size)
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		sum[i] = coeff.Clone()
	}
	for i, coeff := range rhs {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		if sum[i] == nil {
			sum[i] = coeff.Clone()
		} else {
			sum[i] = sum[i].Add(coeff)
		}
	}
	return sum, nil
}
// Sub returns the coefficient-wise difference p - rhs; where rhs is longer
// than p, the trailing rhs coefficients are negated.
func (p polynomial) Sub(rhs polynomial) (polynomial, error) {
	size := int(math.Max(float64(len(p)), float64(len(rhs))))
	diff := make([]curves.Scalar, size)
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		diff[i] = coeff.Clone()
	}
	for i, coeff := range rhs {
		if coeff == nil {
			return nil, fmt.Errorf("invalid coefficient at %d", i)
		}
		if diff[i] == nil {
			diff[i] = coeff.Neg()
		} else {
			diff[i] = diff[i].Sub(coeff)
		}
	}
	return diff, nil
}
// Mul multiplies two polynomials - p * rhs - by convolving coefficients.
func (p polynomial) Mul(rhs polynomial) (polynomial, error) {
	// BUG FIX: an empty operand previously caused make(polynomial, -1) or a
	// p[0] panic; reject empty operands explicitly instead.
	if len(p) == 0 || len(rhs) == 0 {
		return nil, fmt.Errorf("polynomial cannot be empty")
	}
	// Check for each coefficient that should not be nil
	for i, c := range p {
		if c == nil {
			return nil, fmt.Errorf("coefficient in p at %d is nil", i)
		}
	}
	for i, c := range rhs {
		if c == nil {
			return nil, fmt.Errorf("coefficient in rhs at %d is nil", i)
		}
	}
	m := len(p)
	n := len(rhs)
	// Initialize the product polynomial with zeros.
	prod := make(polynomial, m+n-1)
	for i := 0; i < len(prod); i++ {
		prod[i] = p[0].Zero()
	}
	// Multiply two polynomials term by term
	for i, cp := range p {
		for j, cr := range rhs {
			prod[i+j] = prod[i+j].Add(cp.Mul(cr))
		}
	}
	return prod, nil
}
// MulScalar scales every coefficient of p by the scalar value rhs and
// returns the scaled polynomial.
func (p polynomial) MulScalar(rhs curves.Scalar) (polynomial, error) {
	scaled := make(polynomial, len(p))
	for i, coeff := range p {
		if coeff == nil {
			return nil, fmt.Errorf("coefficient at %d is nil", i)
		}
		scaled[i] = coeff.Mul(rhs)
	}
	return scaled, nil
}

404
crypto/accumulator/lib_test.go Executable file
View File

@ -0,0 +1,404 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestEvaluatePolyG1 checks point-polynomial evaluation at x=1 and x=2.
func TestEvaluatePolyG1(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	g := curve.PointG1.Generator()
	poly := polynomialPoint{
		g.Mul(curve.Scalar.New(3)),
		g.Mul(curve.Scalar.New(2)),
		g.Mul(curve.Scalar.New(1)),
	}
	// p(1) = 3 + 2 + 1 = 6
	got1, err := poly.evaluate(curve.Scalar.New(1))
	require.NoError(t, err)
	require.NotNil(t, got1)
	require.Equal(t, got1.ToAffineCompressed(), g.Mul(curve.Scalar.New(6)).ToAffineCompressed())
	// p(2) = 3 + 4 + 4 = 11
	got2, err := poly.evaluate(curve.Scalar.New(2))
	require.NoError(t, err)
	require.NotNil(t, got2)
	require.Equal(t, got2.ToAffineCompressed(), g.Mul(curve.Scalar.New(11)).ToAffineCompressed())
}
// TestEvaluatePolyG1Error verifies a nil coefficient makes evaluate fail.
func TestEvaluatePolyG1Error(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	g := curve.PointG1.Generator()
	poly := polynomialPoint{nil, g.Mul(curve.Scalar.New(2)), g.Mul(curve.Scalar.New(1))}
	_, err := poly.evaluate(curve.Scalar.New(1))
	require.Error(t, err)
}
// TestAddAssignPolyG1 checks point-polynomial addition for equal lengths,
// unequal lengths, and an empty polynomial with spare capacity.
func TestAddAssignPolyG1(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	pt := func(n int) curves.Point {
		return curve.PointG1.Generator().Mul(curve.Scalar.New(n))
	}
	// Equal-length polynomials.
	poly1 := polynomialPoint{pt(3), pt(2), pt(1)}
	poly2 := polynomialPoint{pt(1), pt(2), pt(3)}
	sum, err := poly1.Add(poly2)
	require.NoError(t, err)
	require.NotNil(t, sum)
	want := polynomialPoint{pt(4), pt(4), pt(4)}
	for i := range sum {
		require.Equal(t, sum[i].ToAffineCompressed(), want[i].ToAffineCompressed())
	}
	// Unequal lengths: the shorter operand is zero-padded.
	poly3 := polynomialPoint{pt(1), pt(2)}
	sum2, err := poly1.Add(poly3)
	require.NoError(t, err)
	require.NotNil(t, sum2)
	want2 := polynomialPoint{pt(4), pt(4), pt(1)}
	require.Equal(t, len(sum2), len(want2))
	for i := range sum2 {
		require.Equal(t, sum2[i].ToAffineCompressed(), want2[i].ToAffineCompressed())
	}
	// Empty polynomial with capacity behaves like zero.
	poly4 := make(polynomialPoint, 0, 3)
	poly5, err := poly4.Add(poly1)
	require.NoError(t, err)
	require.Equal(t, len(poly5), len(poly1))
	for i := range poly5 {
		require.Equal(t, poly5[i].ToAffineCompressed(), poly1[i].ToAffineCompressed())
	}
}
// TestAddAssignPolyG1Error verifies a nil coefficient makes Add fail.
func TestAddAssignPolyG1Error(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	g := curve.PointG1.Generator()
	poly1 := polynomialPoint{nil, g.Mul(curve.Scalar.New(2)), g.Mul(curve.Scalar.New(1))}
	poly2 := polynomialPoint{
		g.Mul(curve.Scalar.New(1)),
		g.Mul(curve.Scalar.New(2)),
		g.Mul(curve.Scalar.New(3)),
	}
	sum, err := poly1.Add(poly2)
	require.Error(t, err)
	require.Nil(t, sum)
}
// TestMulAssignPolyG1 checks scalar multiplication of a point polynomial.
func TestMulAssignPolyG1(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	pt := func(n int) curves.Point {
		return curve.PointG1.Generator().Mul(curve.Scalar.New(n))
	}
	poly := polynomialPoint{pt(3), pt(2), pt(1)}
	scaled, err := poly.Mul(curve.Scalar.New(3))
	require.NoError(t, err)
	require.NotNil(t, scaled)
	want := polynomialPoint{pt(9), pt(6), pt(3)}
	for i := range want {
		require.Equal(t, scaled[i].ToAffineCompressed(), want[i].ToAffineCompressed())
	}
}
// TestMulAssignPolyG1Error verifies a nil coefficient makes Mul fail.
func TestMulAssignPolyG1Error(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	g := curve.PointG1.Generator()
	poly := polynomialPoint{nil, g.Mul(curve.Scalar.New(2)), g.Mul(curve.Scalar.New(1))}
	scaled, err := poly.Mul(curve.Scalar.New(3))
	require.Error(t, err)
	require.Nil(t, scaled)
}
// TestPushPoly verifies scalars can be appended to existing and empty
// polynomials.
func TestPushPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	poly := polynomial{curve.Scalar.New(3), curve.Scalar.New(2), curve.Scalar.New(1)}
	four := curve.Scalar.New(4)
	five := curve.Scalar.New(5)
	poly = append(poly, four)
	require.Equal(t, poly[3], four)
	// Push one more
	poly = append(poly, five)
	require.Equal(t, poly[4], five)
	// Push to a new polynomial
	fresh := polynomial{}
	fresh = append(fresh, four)
	require.Equal(t, fresh[0], four)
	fresh = append(fresh, five)
	require.Equal(t, fresh[1], five)
}
// TestAddAssignPoly checks scalar-polynomial addition for equal and
// unequal operand lengths.
func TestAddAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	// Equal-length operands.
	poly1 := polynomial{sc(3), sc(2), sc(1)}
	poly2 := polynomial{sc(1), sc(2), sc(3)}
	sum, err := poly1.Add(poly2)
	require.NoError(t, err)
	require.NotNil(t, sum)
	want := []curves.Scalar{sc(4), sc(4), sc(4)}
	for i := range sum {
		require.Equal(t, sum[i], want[i])
	}
	// Unequal lengths: the shorter operand is zero-padded.
	poly3 := polynomial{sc(1), sc(2)}
	sum2, err := poly1.Add(poly3)
	require.NoError(t, err)
	require.NotNil(t, sum2)
	want2 := []curves.Scalar{sc(4), sc(4), sc(1)}
	require.Equal(t, len(sum2), len(want2))
	for i := range sum2 {
		require.Equal(t, sum2[i], want2[i])
	}
}
// TestAddAssignPolyError verifies a nil coefficient makes Add fail.
func TestAddAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	poly1 := polynomial{nil, sc(2), sc(1)}
	poly2 := polynomial{sc(1), sc(2), sc(3)}
	sum, err := poly1.Add(poly2)
	require.Error(t, err)
	require.Nil(t, sum)
}
// TestSubAssignPoly checks scalar-polynomial subtraction for equal lengths
// and for a longer right-hand side.
func TestSubAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	// Equal-length operands.
	poly1 := polynomial{sc(3), sc(2), sc(1)}
	poly2 := polynomial{sc(1), sc(2), sc(3)}
	diff, err := poly1.Sub(poly2)
	require.NoError(t, err)
	require.NotNil(t, diff)
	want := []curves.Scalar{sc(2), sc(0), sc(-2)}
	for i := range diff {
		require.Equal(t, diff[i].Bytes(), want[i].Bytes())
	}
	// rhs longer than p: the tail coefficients are negated.
	poly3 := polynomial{sc(1), sc(2), sc(3), sc(4)}
	diff2, err := poly1.Sub(poly3)
	require.NoError(t, err)
	require.NotNil(t, diff2)
	want2 := []curves.Scalar{sc(2), sc(0), sc(-2), sc(-4)}
	require.Equal(t, len(diff2), len(want2))
	for i := range diff2 {
		require.Equal(t, diff2[i].Bytes(), want2[i].Bytes())
	}
}
// TestSubAssignPolyError verifies a nil coefficient makes Sub fail.
func TestSubAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	poly1 := polynomial{nil, sc(2), sc(1)}
	poly2 := polynomial{sc(1), sc(2), sc(3)}
	diff, err := poly1.Sub(poly2)
	require.Error(t, err)
	require.Nil(t, diff)
}
// TestMulAssignPoly checks polynomial multiplication (coefficient
// convolution) for equal and unequal operand lengths.
func TestMulAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	poly1 := polynomial{sc(3), sc(2), sc(1)}
	poly2 := polynomial{sc(1), sc(2), sc(3)}
	// (3 + 2x + x^2)(1 + 2x + 3x^2) = 3 + 8x + 14x^2 + 8x^3 + 3x^4
	prod, err := poly1.Mul(poly2)
	require.NoError(t, err)
	require.NotNil(t, prod)
	want := []curves.Scalar{sc(3), sc(8), sc(14), sc(8), sc(3)}
	for i := range want {
		require.Equal(t, prod[i].Bytes(), want[i].Bytes())
	}
	// (3 + 2x + x^2)(1 + 2x) = 3 + 8x + 5x^2 + 2x^3
	poly3 := polynomial{sc(1), sc(2)}
	prod2, err := poly1.Mul(poly3)
	require.NoError(t, err)
	require.NotNil(t, prod2)
	want2 := []curves.Scalar{sc(3), sc(8), sc(5), sc(2)}
	require.Equal(t, len(prod2), 4)
	for i := range prod2 {
		require.Equal(t, prod2[i].Bytes(), want2[i].Bytes())
	}
}
// TestMulAssignPolyError verifies a nil coefficient makes Mul fail.
func TestMulAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	poly1 := polynomial{nil, sc(2), sc(1)}
	poly2 := polynomial{sc(1), sc(2), sc(3)}
	prod, err := poly1.Mul(poly2)
	require.Error(t, err)
	require.Nil(t, prod)
}
// TestMulValueAssignPoly checks scalar multiplication of a polynomial.
func TestMulValueAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	poly := polynomial{sc(3), sc(2), sc(1)}
	scaled, err := poly.MulScalar(sc(3))
	require.NoError(t, err)
	require.NotNil(t, scaled)
	want := []curves.Scalar{sc(9), sc(6), sc(3)}
	for i := range want {
		require.Equal(t, scaled[i].Bytes(), want[i].Bytes())
	}
}
// TestMulValueAssignPolyError verifies a nil coefficient makes MulScalar fail.
func TestMulValueAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sc := func(n int) curves.Scalar { return curve.Scalar.New(n) }
	poly := polynomial{nil, sc(2), sc(1)}
	scaled, err := poly.MulScalar(sc(3))
	require.Error(t, err)
	require.Nil(t, scaled)
}

518
crypto/accumulator/proof.go Executable file
View File

@ -0,0 +1,518 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"bytes"
crand "crypto/rand"
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/hway/crypto/core/curves"
)
// proofParamsMarshal is the bare-encoded wire form of ProofParams.
type proofParamsMarshal struct {
	X     []byte `bare:"x"`
	Y     []byte `bare:"y"`
	Z     []byte `bare:"z"`
	Curve string `bare:"curve"`
}
// ProofParams contains three distinct public generators of G1 - X, Y, Z.
// (The original comment said "four", but only x, y, z are stored.)
type ProofParams struct {
	x, y, z curves.Point
}
// New samples the proof generators X, Y, Z by hashing a domain-separated
// combination of the public key and caller-supplied entropy.
// (The previous comment mentioned a fourth generator K that is not produced.)
func (p *ProofParams) New(curve *curves.PairingCurve, pk *PublicKey, entropy []byte) (*ProofParams, error) {
	// Guard against nil inputs, which previously caused a panic when
	// dereferencing pk or curve.
	if curve == nil || pk == nil {
		return nil, fmt.Errorf("curve and pk cannot be nil")
	}
	pkBytes, err := pk.MarshalBinary()
	if err != nil {
		return nil, err
	}
	// Each generator is domain-separated by varying the first prefix byte.
	prefix := bytes.Repeat([]byte{0xFF}, 32)
	data := append(prefix, entropy...)
	data = append(data, pkBytes...)
	p.z = curve.Scalar.Point().Hash(data)
	data[0] = 0xFE
	p.y = curve.Scalar.Point().Hash(data)
	data[0] = 0xFD
	p.x = curve.Scalar.Point().Hash(data)
	return p, nil
}
// MarshalBinary converts ProofParams to bytes.
// All three generators must be set before encoding.
func (p *ProofParams) MarshalBinary() ([]byte, error) {
	if p.x == nil || p.y == nil || p.z == nil {
		return nil, fmt.Errorf("some value x, y, or z is nil")
	}
	return bare.Marshal(&proofParamsMarshal{
		X:     p.x.ToAffineCompressed(),
		Y:     p.y.ToAffineCompressed(),
		Z:     p.z.ToAffineCompressed(),
		Curve: p.x.CurveName(),
	})
}
// UnmarshalBinary converts bytes to ProofParams.
// All three generators are decoded before p is mutated, so a partial decode
// failure leaves p unchanged.
func (p *ProofParams) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	var tv proofParamsMarshal
	if err := bare.Unmarshal(data, &tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	var pts [3]curves.Point
	for i, raw := range [][]byte{tv.X, tv.Y, tv.Z} {
		pt, err := curve.NewIdentityPoint().FromAffineCompressed(raw)
		if err != nil {
			return err
		}
		pts[i] = pt
	}
	p.x, p.y, p.z = pts[0], pts[1], pts[2]
	return nil
}
// MembershipProofCommitting contains value computed in Proof of knowledge and
// Blinding phases as described in section 7 of https://eprint.iacr.org/2020/777.pdf
type MembershipProofCommitting struct {
	eC             curves.Point  // E_C = C + (sigma + rho)Z, blinded witness commitment
	tSigma         curves.Point  // T_sigma = sigma * X
	tRho           curves.Point  // T_rho = rho * Y
	deltaSigma     curves.Scalar // delta_sigma = y * sigma
	deltaRho       curves.Scalar // delta_rho = y * rho
	blindingFactor curves.Scalar // r_y, blinding for the witness value y
	rSigma         curves.Scalar // random r_sigma
	rRho           curves.Scalar // random r_rho
	rDeltaSigma    curves.Scalar // random r_delta_sigma
	rDeltaRho      curves.Scalar // random r_delta_rho
	sigma          curves.Scalar // random sigma
	rho            curves.Scalar // random rho
	capRSigma      curves.Point  // R_sigma = r_sigma * X
	capRRho        curves.Point  // R_rho = r_rho * Y
	capRDeltaSigma curves.Point  // R_delta_sigma = r_y*T_sigma - r_delta_sigma*X
	capRDeltaRho   curves.Point  // R_delta_rho = r_y*T_rho - r_delta_rho*Y
	capRE          curves.Scalar // R_E, multi-pairing value
	accumulator    curves.Point  // accumulator value V
	witnessValue   curves.Scalar // witness value y
	xG1            curves.Point  // proof generator X
	yG1            curves.Point  // proof generator Y
	zG1            curves.Point  // proof generator Z
}
// New initiates values of MembershipProofCommitting.
// It performs the Proof-of-Knowledge and Blinding phases: random blinds
// sigma/rho are sampled, the witness commitment is blinded into E_C, the
// per-value commitments are computed, and the pairing value R_E is derived.
func (mpc *MembershipProofCommitting) New(
	witness *MembershipWitness,
	acc *Accumulator,
	pp *ProofParams,
	pk *PublicKey,
) (*MembershipProofCommitting, error) {
	// Randomly select σ, ρ
	sigma := witness.y.Random(crand.Reader)
	rho := witness.y.Random(crand.Reader)
	// E_C = C + (σ + ρ)Z
	t := sigma
	t = t.Add(rho)
	eC := pp.z
	eC = eC.Mul(t)
	eC = eC.Add(witness.c)
	// T_σ = σX
	tSigma := pp.x
	tSigma = tSigma.Mul(sigma)
	// T_ρ = ρY
	tRho := pp.y
	tRho = tRho.Mul(rho)
	// δ_σ = yσ
	deltaSigma := witness.y
	deltaSigma = deltaSigma.Mul(sigma)
	// δ_ρ = yρ
	deltaRho := witness.y
	deltaRho = deltaRho.Mul(rho)
	// Randomly pick r_σ,r_ρ,r_δσ,r_δρ (and r_y, the witness blinding factor)
	rY := witness.y.Random(crand.Reader)
	rSigma := witness.y.Random(crand.Reader)
	rRho := witness.y.Random(crand.Reader)
	rDeltaSigma := witness.y.Random(crand.Reader)
	rDeltaRho := witness.y.Random(crand.Reader)
	// R_σ = r_σ X
	capRSigma := pp.x
	capRSigma = capRSigma.Mul(rSigma)
	// R_ρ = r_ρ Y
	capRRho := pp.y
	capRRho = capRRho.Mul(rRho)
	// R_δσ = r_y T_σ - r_δσ X
	negX := pp.x
	negX = negX.Neg()
	capRDeltaSigma := tSigma.Mul(rY)
	capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(rDeltaSigma))
	// R_δρ = r_y T_ρ - r_δρ Y
	negY := pp.y
	negY = negY.Neg()
	capRDeltaRho := tRho.Mul(rY)
	capRDeltaRho = capRDeltaRho.Add(negY.Mul(rDeltaRho))
	// P~ : generator of the other pairing group
	g2 := pk.value.Generator()
	// -r_δσ - r_δρ
	exp := rDeltaSigma
	exp = exp.Add(rDeltaRho)
	exp = exp.Neg()
	// -r_σ - r_ρ
	exp2 := rSigma
	exp2 = exp2.Add(rRho)
	exp2 = exp2.Neg()
	// rY * eC
	rYeC := eC.Mul(rY)
	// (-r_δσ - r_δρ)*Z
	expZ := pp.z.Mul(exp)
	// (-r_σ - r_ρ)*Z
	exp2Z := pp.z.Mul(exp2)
	// Prepare: every multi-pairing input must be a PairingPoint.
	rYeCPrep, ok := rYeC.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	g2Prep, ok := g2.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	expZPrep, ok := expZ.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	exp2ZPrep, ok := exp2Z.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	pkPrep := pk.value
	// Pairing: R_E from the multi-pairing over the prepared points.
	capRE := g2Prep.MultiPairing(rYeCPrep, g2Prep, expZPrep, g2Prep, exp2ZPrep, pkPrep)
	// Fields are positional; rY is stored as the blindingFactor.
	return &MembershipProofCommitting{
		eC,
		tSigma,
		tRho,
		deltaSigma,
		deltaRho,
		rY,
		rSigma,
		rRho,
		rDeltaSigma,
		rDeltaRho,
		sigma,
		rho,
		capRSigma,
		capRRho,
		capRDeltaSigma,
		capRDeltaRho,
		capRE,
		acc.value,
		witness.y,
		pp.x,
		pp.y,
		pp.z,
	}, nil
}
// GetChallengeBytes returns the transcript bytes that are hashed to derive
// the Fiat-Shamir challenge:
// V || Ec || T_sigma || T_rho || R_E || R_sigma || R_rho || R_delta_sigma || R_delta_rho
func (mpc MembershipProofCommitting) GetChallengeBytes() []byte {
	var res []byte
	for _, pt := range []curves.Point{mpc.accumulator, mpc.eC, mpc.tSigma, mpc.tRho} {
		res = append(res, pt.ToAffineCompressed()...)
	}
	res = append(res, mpc.capRE.Bytes()...)
	for _, pt := range []curves.Point{mpc.capRSigma, mpc.capRRho, mpc.capRDeltaSigma, mpc.capRDeltaRho} {
		res = append(res, pt.ToAffineCompressed()...)
	}
	return res
}
// GenProof computes the Schnorr s-values for Fiat-Shamir from challenge c
// and returns the actual proof to be sent to the verifier.
func (mpc *MembershipProofCommitting) GenProof(c curves.Scalar) *MembershipProof {
	return &MembershipProof{
		eC:     mpc.eC,
		tSigma: mpc.tSigma,
		tRho:   mpc.tRho,
		// s_σ = r_σ + c*σ
		sSigma: schnorr(mpc.rSigma, mpc.sigma, c),
		// s_ρ = r_ρ + c*ρ
		sRho: schnorr(mpc.rRho, mpc.rho, c),
		// s_δσ = r_δσ + c*δ_σ
		sDeltaSigma: schnorr(mpc.rDeltaSigma, mpc.deltaSigma, c),
		// s_δρ = r_δρ + c*δ_ρ
		sDeltaRho: schnorr(mpc.rDeltaRho, mpc.deltaRho, c),
		// s_y = r_y + c*y
		sY: schnorr(mpc.blindingFactor, mpc.witnessValue, c),
	}
}
// schnorr computes the standard Schnorr response r + v*challenge.
func schnorr(r, v, challenge curves.Scalar) curves.Scalar {
	return v.Mul(challenge).Add(r)
}
// membershipProofMarshal is the bare-encoded wire form of MembershipProof.
type membershipProofMarshal struct {
	EC          []byte `bare:"e_c"`
	TSigma      []byte `bare:"t_sigma"`
	TRho        []byte `bare:"t_rho"`
	SSigma      []byte `bare:"s_sigma"`
	SRho        []byte `bare:"s_rho"`
	SDeltaSigma []byte `bare:"s_delta_sigma"`
	SDeltaRho   []byte `bare:"s_delta_rho"`
	SY          []byte `bare:"s_y"`
	Curve       string `bare:"curve"`
}
// MembershipProof contains values in the proof to be verified
type MembershipProof struct {
	eC          curves.Point  // blinded witness commitment E_C
	tSigma      curves.Point  // commitment T_sigma
	tRho        curves.Point  // commitment T_rho
	sSigma      curves.Scalar // Schnorr response s_sigma
	sRho        curves.Scalar // Schnorr response s_rho
	sDeltaSigma curves.Scalar // Schnorr response s_delta_sigma
	sDeltaRho   curves.Scalar // Schnorr response s_delta_rho
	sY          curves.Scalar // Schnorr response s_y for the witness value
}
// Finalize computes values in the proof to be verified.
// It reconstructs the commitments R_sigma, R_rho, R_delta_sigma, R_delta_rho
// and the pairing value R_E from the s-values and the challenge, producing
// the inputs needed to recompute the Fiat-Shamir challenge.
func (mp *MembershipProof) Finalize(acc *Accumulator, pp *ProofParams, pk *PublicKey, challenge curves.Scalar) (*MembershipProofFinal, error) {
	// R_σ = s_δ X + c T_σ
	negTSigma := mp.tSigma
	negTSigma = negTSigma.Neg()
	capRSigma := pp.x.Mul(mp.sSigma)
	capRSigma = capRSigma.Add(negTSigma.Mul(challenge))
	// R_ρ = s_ρ Y + c T_ρ
	negTRho := mp.tRho
	negTRho = negTRho.Neg()
	capRRho := pp.y.Mul(mp.sRho)
	capRRho = capRRho.Add(negTRho.Mul(challenge))
	// R_δσ = s_y T_σ - s_δσ X
	negX := pp.x
	negX = negX.Neg()
	capRDeltaSigma := mp.tSigma.Mul(mp.sY)
	capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(mp.sDeltaSigma))
	// R_δρ = s_y T_ρ - s_δρ Y
	negY := pp.y
	negY = negY.Neg()
	capRDeltaRho := mp.tRho.Mul(mp.sY)
	capRDeltaRho = capRDeltaRho.Add(negY.Mul(mp.sDeltaRho))
	// tildeP: generator of the other pairing group
	g2 := pk.value.Generator()
	// Compute capRE, the pairing
	// E_c * s_y
	eCsY := mp.eC.Mul(mp.sY)
	// (-s_delta_sigma - s_delta_rho) * Z
	exp := mp.sDeltaSigma
	exp = exp.Add(mp.sDeltaRho)
	exp = exp.Neg()
	expZ := pp.z.Mul(exp)
	// (-c) * V
	exp = challenge.Neg()
	expV := acc.value.Mul(exp)
	// E_c * s_y + (-s_delta_sigma - s_delta_rho) * Z + (-c) * V
	lhs := eCsY.Add(expZ).Add(expV)
	// (-s_sigma - s_rho) * Z
	exp = mp.sSigma
	exp = exp.Add(mp.sRho)
	exp = exp.Neg()
	expZ2 := pp.z.Mul(exp)
	// E_c * c
	cEc := mp.eC.Mul(challenge)
	// (-s_sigma - s_rho) * Z + E_c * c
	rhs := cEc.Add(expZ2)
	// Prepare: every multi-pairing input must be a PairingPoint.
	lhsPrep, ok := lhs.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	g2Prep, ok := g2.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	rhsPrep, ok := rhs.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	pkPrep := pk.value
	// capRE via the multi-pairing over (lhs, g2) and (rhs, pk)
	capRE := g2Prep.MultiPairing(lhsPrep, g2Prep, rhsPrep, pkPrep)
	return &MembershipProofFinal{
		acc.value,
		mp.eC,
		mp.tSigma,
		mp.tRho,
		capRE,
		capRSigma,
		capRRho,
		capRDeltaSigma,
		capRDeltaRho,
	}, nil
}
// MarshalBinary converts MembershipProof to bytes
// using the bare encoding of membershipProofMarshal (points compressed,
// scalars in their canonical byte form, plus the curve name).
func (mp MembershipProof) MarshalBinary() ([]byte, error) {
	marshal := membershipProofMarshal{
		EC:          mp.eC.ToAffineCompressed(),
		TSigma:      mp.tSigma.ToAffineCompressed(),
		TRho:        mp.tRho.ToAffineCompressed(),
		SSigma:      mp.sSigma.Bytes(),
		SRho:        mp.sRho.Bytes(),
		SDeltaSigma: mp.sDeltaSigma.Bytes(),
		SDeltaRho:   mp.sDeltaRho.Bytes(),
		SY:          mp.sY.Bytes(),
		Curve:       mp.eC.CurveName(),
	}
	return bare.Marshal(&marshal)
}
// UnmarshalBinary converts bytes to MembershipProof
// The receiver is only modified once every component decodes successfully.
func (mp *MembershipProof) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	tv := new(membershipProofMarshal)
	if err := bare.Unmarshal(data, tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// Decode the three compressed points in their serialized order.
	points := make([]curves.Point, 3)
	for i, raw := range [][]byte{tv.EC, tv.TSigma, tv.TRho} {
		pt, err := curve.NewIdentityPoint().FromAffineCompressed(raw)
		if err != nil {
			return err
		}
		points[i] = pt
	}
	// Decode the five response scalars in their serialized order.
	scalars := make([]curves.Scalar, 5)
	for i, raw := range [][]byte{tv.SSigma, tv.SRho, tv.SDeltaSigma, tv.SDeltaRho, tv.SY} {
		sc, err := curve.NewScalar().SetBytes(raw)
		if err != nil {
			return err
		}
		scalars[i] = sc
	}
	mp.eC, mp.tSigma, mp.tRho = points[0], points[1], points[2]
	mp.sSigma, mp.sRho, mp.sDeltaSigma = scalars[0], scalars[1], scalars[2]
	mp.sDeltaRho, mp.sY = scalars[3], scalars[4]
	return nil
}
// MembershipProofFinal contains values that are input to Fiat-Shamir Heuristic
type MembershipProofFinal struct {
	accumulator    curves.Point  // current accumulator value V
	eC             curves.Point  // E_C from the proof
	tSigma         curves.Point  // T_σ from the proof
	tRho           curves.Point  // T_ρ from the proof
	capRE          curves.Scalar // recomputed pairing value R_E
	capRSigma      curves.Point  // recomputed commitment R_σ
	capRRho        curves.Point  // recomputed commitment R_ρ
	capRDeltaSigma curves.Point  // recomputed commitment R_δσ
	capRDeltaRho   curves.Point  // recomputed commitment R_δρ
}
// GetChallenge computes Fiat-Shamir Heuristic taking input values of MembershipProofFinal
// All components are serialized into a single transcript, in a fixed order,
// and hashed to a scalar of the given pairing curve.
func (m MembershipProofFinal) GetChallenge(curve *curves.PairingCurve) curves.Scalar {
	var transcript []byte
	for _, chunk := range [][]byte{
		m.accumulator.ToAffineCompressed(),
		m.eC.ToAffineCompressed(),
		m.tSigma.ToAffineCompressed(),
		m.tRho.ToAffineCompressed(),
		m.capRE.Bytes(),
		m.capRSigma.ToAffineCompressed(),
		m.capRRho.ToAffineCompressed(),
		m.capRDeltaSigma.ToAffineCompressed(),
		m.capRDeltaRho.ToAffineCompressed(),
	} {
		transcript = append(transcript, chunk...)
	}
	return curve.Scalar.Hash(transcript)
}

182
crypto/accumulator/proof_test.go Executable file
View File

@ -0,0 +1,182 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestProofParamsMarshal round-trips ProofParams through MarshalBinary /
// UnmarshalBinary and checks the decoded generators equal the originals.
func TestProofParamsMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	params, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, params.x)
	require.NotNil(t, params.y)
	require.NotNil(t, params.z)
	serialized, err := params.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, serialized)
	// Seed the target with placeholder generators; Unmarshal must overwrite them.
	roundTripped := &ProofParams{
		curve.PointG1.Generator(),
		curve.PointG1.Generator(),
		curve.PointG1.Generator(),
	}
	require.NoError(t, roundTripped.UnmarshalBinary(serialized))
	require.True(t, params.x.Equal(roundTripped.x))
	require.True(t, params.y.Equal(roundTripped.y))
	require.True(t, params.z.Equal(roundTripped.z))
}
// TestMembershipProof runs the full prove/verify cycle for accumulator
// membership: commit, derive the Fiat-Shamir challenge, generate the proof,
// finalize it, and confirm the recomputed challenge matches. It then batch
// updates the accumulator and witness and repeats the cycle.
func TestMembershipProof(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// Initiate a new accumulator
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	// Initiate a new membership witness for value elements[3]
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	// Create proof parameters, which contains randomly sampled G1 points X, Y, Z, K
	params, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, params.x)
	require.NotNil(t, params.y)
	require.NotNil(t, params.z)
	// Commit phase of the proof.
	mpc, err := new(MembershipProofCommitting).New(wit, acc, params, pk)
	require.NoError(t, err)
	testMPC(t, mpc)
	// Fiat-Shamir challenge from the commitment transcript.
	challenge := curve.Scalar.Hash(mpc.GetChallengeBytes())
	require.NotNil(t, challenge)
	proof := mpc.GenProof(challenge)
	require.NotNil(t, proof)
	testProof(t, proof)
	// Finalize recomputes the verifier-side values; its challenge must match.
	finalProof, err := proof.Finalize(acc, params, pk, challenge)
	require.NoError(t, err)
	require.NotNil(t, finalProof)
	testFinalProof(t, finalProof)
	challenge2 := finalProof.GetChallenge(curve)
	require.Equal(t, challenge, challenge2)
	// Check we can still have a valid proof even if accumulator and witness are updated
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	additions := data[0:2]
	deletions := data[2:5]
	_, coefficients, err := acc.Update(sk, additions, deletions)
	require.NoError(t, err)
	require.NotNil(t, coefficients)
	_, err = wit.BatchUpdate(additions, deletions, coefficients)
	require.NoError(t, err)
	// Re-run the full prove/verify cycle against the updated state.
	newParams, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, newParams.x)
	require.NotNil(t, newParams.y)
	require.NotNil(t, newParams.z)
	newMPC, err := new(MembershipProofCommitting).New(wit, acc, newParams, pk)
	require.NoError(t, err)
	testMPC(t, newMPC)
	challenge3 := curve.Scalar.Hash(newMPC.GetChallengeBytes())
	require.NotNil(t, challenge3)
	newProof := newMPC.GenProof(challenge3)
	require.NotNil(t, newProof)
	testProof(t, newProof)
	newFinalProof, err := newProof.Finalize(acc, newParams, pk, challenge3)
	require.NoError(t, err)
	require.NotNil(t, newFinalProof)
	testFinalProof(t, newFinalProof)
	challenge4 := newFinalProof.GetChallenge(curve)
	require.Equal(t, challenge3, challenge4)
}
// testMPC asserts that every field of a MembershipProofCommitting is populated.
func testMPC(t *testing.T, mpc *MembershipProofCommitting) {
	fields := []any{
		mpc.eC, mpc.tSigma, mpc.tRho,
		mpc.deltaSigma, mpc.deltaRho, mpc.blindingFactor,
		mpc.rSigma, mpc.rRho, mpc.rDeltaSigma, mpc.rDeltaRho,
		mpc.sigma, mpc.rho,
		mpc.capRSigma, mpc.capRRho, mpc.capRDeltaSigma, mpc.capRDeltaRho,
		mpc.capRE,
		mpc.accumulator, mpc.witnessValue,
		mpc.xG1, mpc.yG1, mpc.zG1,
	}
	for _, field := range fields {
		require.NotNil(t, field)
	}
}
// testProof asserts that every field of a MembershipProof is populated.
func testProof(t *testing.T, proof *MembershipProof) {
	fields := []any{
		proof.eC, proof.tSigma, proof.tRho,
		proof.sSigma, proof.sRho,
		proof.sDeltaSigma, proof.sDeltaRho, proof.sY,
	}
	for _, field := range fields {
		require.NotNil(t, field)
	}
}
// testFinalProof asserts that every field of a MembershipProofFinal is populated.
func testFinalProof(t *testing.T, finalProof *MembershipProofFinal) {
	fields := []any{
		finalProof.accumulator, finalProof.eC,
		finalProof.tSigma, finalProof.tRho,
		finalProof.capRE, finalProof.capRSigma, finalProof.capRRho,
		finalProof.capRDeltaSigma, finalProof.capRDeltaRho,
	}
	for _, field := range fields {
		require.NotNil(t, field)
	}
}

375
crypto/accumulator/witness.go Executable file
View File

@ -0,0 +1,375 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"github.com/onsonr/hway/crypto/core/curves"
)
// MembershipWitness contains the witness c and the value y respect to the accumulator state.
type MembershipWitness struct {
	c curves.Point  // witness point C, the accumulator value with y removed (see New)
	y curves.Scalar // the accumulated element this witness attests membership of
}
// New creates a new membership witness
// for element y against the accumulator acc, using the secret key sk.
// The witness point is the accumulator value with y removed.
func (mw *MembershipWitness) New(y Element, acc *Accumulator, sk *SecretKey) (*MembershipWitness, error) {
	switch {
	case acc.value == nil || acc.value.IsIdentity():
		return nil, fmt.Errorf("value of accumulator should not be nil")
	case sk.value == nil || sk.value.IsZero():
		return nil, fmt.Errorf("secret key should not be nil")
	case y == nil || y.IsZero():
		return nil, fmt.Errorf("y should not be nil")
	}
	// Remove y from a copy of the accumulator; the result is the witness point.
	scratch := &Accumulator{acc.value}
	if _, err := scratch.Remove(sk, y); err != nil {
		return nil, err
	}
	mw.c = scratch.value
	// y.Add(y.Zero()) yields a fresh scalar so the witness owns its own copy.
	mw.y = y.Add(y.Zero())
	return mw, nil
}
// Verify the MembershipWitness mw is a valid witness as per section 4 in
// <https://eprint.iacr.org/2020/777>
// It checks e(C, y*tildeP + tildeQ) * e(-V, tildeP) == 1, which is the
// pairing equation e(C, y*tildeP + tildeQ) == e(V, tildeP) rearranged into a
// single multi-pairing.
func (mw MembershipWitness) Verify(pk *PublicKey, acc *Accumulator) error {
	// Reject incomplete or degenerate witness values up front.
	if mw.c == nil || mw.y == nil || mw.c.IsIdentity() || mw.y.IsZero() {
		return fmt.Errorf("c and y should not be nil")
	}
	if pk.value == nil || pk.value.IsIdentity() {
		return fmt.Errorf("invalid public key")
	}
	if acc.value == nil || acc.value.IsIdentity() {
		return fmt.Errorf("accumulator value should not be nil")
	}
	// Set -tildeP
	g2, ok := pk.value.Generator().(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	// y*tildeP + tildeQ, tildeP is a G2 generator.
	p, ok := g2.Mul(mw.y).Add(pk.value).(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	// Prepare
	witness, ok := mw.c.(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	v, ok := acc.value.Neg().(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	// Check e(witness, y*tildeP + tildeQ) * e(-acc, tildeP) == Identity
	result := p.MultiPairing(witness, p, v, g2)
	if !result.IsOne() {
		return fmt.Errorf("invalid result")
	}
	return nil
}
// ApplyDelta returns C' = dA(y)/dD(y)*C + 1/dD(y) * <Gamma_y, Omega>
// according to the witness update protocol described in section 4 of
// https://eprint.iacr.org/2020/777.pdf
// delta.d carries dA(y)/dD(y) and delta.p carries 1/dD(y)*<Gamma_y, Omega>.
func (mw *MembershipWitness) ApplyDelta(delta *Delta) (*MembershipWitness, error) {
	switch {
	case mw.c == nil, mw.y == nil, delta == nil:
		return nil, fmt.Errorf("y, c or delta should not be nil")
	}
	// C' = dA(y)/dD(y)*C + 1/dD(y) * <Gamma_y, Omega>
	scaled := mw.c.Mul(delta.d)
	mw.c = scaled.Add(delta.p)
	return mw, nil
}
// BatchUpdate performs batch update as described in section 4
// of https://eprint.iacr.org/2020/777.pdf, updating the witness in place for
// the given additions, deletions and update coefficients.
func (mw *MembershipWitness) BatchUpdate(additions []Element, deletions []Element, coefficients []Coefficient) (*MembershipWitness, error) {
	delta, err := evaluateDelta(mw.y, additions, deletions, coefficients)
	if err != nil {
		return nil, err
	}
	if _, err = mw.ApplyDelta(delta); err != nil {
		return nil, fmt.Errorf("applyDelta fails")
	}
	return mw, nil
}
// MultiBatchUpdate performs multi-batch update using epoch as described in section 4.2
// of https://eprint.iacr.org/2020/777.pdf. A, D and C hold per-epoch
// additions, deletions and coefficients respectively.
func (mw *MembershipWitness) MultiBatchUpdate(A [][]Element, D [][]Element, C [][]Coefficient) (*MembershipWitness, error) {
	delta, deltaErr := evaluateDeltas(mw.y, A, D, C)
	if deltaErr != nil {
		return nil, fmt.Errorf("evaluateDeltas fails")
	}
	if _, err := mw.ApplyDelta(delta); err != nil {
		return nil, err
	}
	return mw, nil
}
// MarshalBinary converts a membership witness to bytes
// The payload is the compressed point c followed by the scalar y, wrapped in
// a bare-encoded structMarshal with the curve name.
func (mw MembershipWitness) MarshalBinary() ([]byte, error) {
	if mw.c == nil || mw.y == nil {
		return nil, fmt.Errorf("c and y value should not be nil")
	}
	payload := mw.c.ToAffineCompressed()
	payload = append(payload, mw.y.Bytes()...)
	return bare.Marshal(&structMarshal{
		Value: payload,
		Curve: mw.c.CurveName(),
	})
}
// UnmarshalBinary converts bytes into MembershipWitness
// The payload must be exactly one compressed point followed by one scalar.
func (mw *MembershipWitness) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("input data should not be nil")
	}
	tv := new(structMarshal)
	if err := bare.Unmarshal(data, tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// Sizes are fixed by the curve; reject any other payload length.
	ptLen := len(curve.Point.ToAffineCompressed())
	scLen := len(curve.Scalar.Bytes())
	if len(tv.Value) != ptLen+scLen {
		return fmt.Errorf("invalid byte sequence")
	}
	point, err := curve.Point.FromAffineCompressed(tv.Value[:ptLen])
	if err != nil {
		return err
	}
	scalar, err := curve.Scalar.SetBytes(tv.Value[ptLen:])
	if err != nil {
		return err
	}
	mw.c, mw.y = point, scalar
	return nil
}
// Delta contains values d and p, where d should be the division dA(y)/dD(y) on some value y
// p should be equal to 1/dD * <Gamma_y, Omega>
type Delta struct {
	d curves.Scalar // dA(y)/dD(y)
	p curves.Point  // 1/dD(y) * <Gamma_y, Omega>
}
// MarshalBinary converts Delta into bytes
// The payload is the compressed point p followed by the scalar d, wrapped in
// a bare-encoded structMarshal with the curve name.
func (d *Delta) MarshalBinary() ([]byte, error) {
	if d.d == nil || d.p == nil {
		return nil, fmt.Errorf("d and p should not be nil")
	}
	payload := d.p.ToAffineCompressed()
	payload = append(payload, d.d.Bytes()...)
	return bare.Marshal(&structMarshal{
		Value: payload,
		Curve: d.p.CurveName(),
	})
}
// UnmarshalBinary converts data into Delta
// The payload must be exactly one compressed point followed by one scalar.
func (d *Delta) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	tv := new(structMarshal)
	if err := bare.Unmarshal(data, tv); err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// Sizes are fixed by the curve; reject any other payload length.
	ptLen := len(curve.Point.ToAffineCompressed())
	scLen := len(curve.Scalar.Bytes())
	if len(tv.Value) != ptLen+scLen {
		return fmt.Errorf("invalid byte sequence")
	}
	point, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Value[:ptLen])
	if err != nil {
		return err
	}
	scalar, err := curve.NewScalar().SetBytes(tv.Value[ptLen:])
	if err != nil {
		return err
	}
	d.d = scalar
	d.p = point
	return nil
}
// evaluateDeltas compute values used for membership witness batch update with epoch
// as described in section 4.2, page 11 of https://eprint.iacr.org/2020/777.pdf
// A, D and C are per-epoch additions, deletions and update coefficients;
// the returned Delta aggregates all epochs into a single witness update.
// An error is returned if y was deleted in some epoch (dD(y) has no inverse).
func evaluateDeltas(y Element, A [][]Element, D [][]Element, C [][]Coefficient) (*Delta, error) {
	if len(A) != len(D) || len(A) != len(C) {
		return nil, fmt.Errorf("a, d, c should have same length")
	}
	one := y.One()
	size := len(A)
	// dA(x) = ∏ 1..n (yA_i - x)
	aa := make([]curves.Scalar, 0)
	// dD(x) = ∏ 1..m (yD_i - x)
	dd := make([]curves.Scalar, 0)
	a := one
	d := one
	// dA_{a->b}(y) = ∏ a..b dAs(y)
	// dD_{a->b}(y) = ∏ a..b dDs(y)
	for i := 0; i < size; i++ {
		adds := A[i]
		dels := D[i]
		// ta = dAs(y)
		ta, err := dad(adds, y)
		if err != nil {
			return nil, fmt.Errorf("dad on additions fails")
		}
		// td = dDs(y)
		td, err := dad(dels, y)
		if err != nil {
			return nil, fmt.Errorf("dad on deletions fails")
		}
		// ∏ a..b dAs(y)
		a = a.Mul(ta)
		// ∏ a..b dDs(y)
		d = d.Mul(td)
		// Keep the per-epoch factors for the Omega aggregation below.
		aa = append(aa, ta)
		dd = append(dd, td)
	}
	// If this fails, then this value was removed.
	d, err := d.Invert()
	if err != nil {
		return nil, fmt.Errorf("no inverse exists")
	}
	// <Gamma_y, Omega>
	p := make(polynomialPoint, 0, size)
	// Ωi->j+1 = ∑ 1..t (dAt * dDt-1) · Ω
	for i := 0; i < size; i++ {
		// t = i+1
		// ∏^(t-1)_(h=i+1)
		ddh := one
		// dDi→t1 (y)
		for h := 0; h < i; h++ {
			ddh = ddh.Mul(dd[h])
		}
		// ∏^(j+1)_(k=t+1)
		dak := one
		// dAt->j(y)
		for k := i + 1; k < size; k++ {
			dak = dak.Mul(aa[k])
		}
		// dDi->t-1(y) * dAt->j(y)
		dak = dak.Mul(ddh)
		// Copy this epoch's coefficients into a polynomial before scaling.
		pp := make(polynomialPoint, len(C[i]))
		for j := 0; j < len(pp); j++ {
			pp[j] = C[i][j]
		}
		// dDi->t-1(y) * dAt->j(y) · Ω
		pp, err := pp.Mul(dak)
		if err != nil {
			return nil, fmt.Errorf("pp.Mul fails")
		}
		// Accumulate the scaled epoch polynomial into the running sum.
		p, err = p.Add(pp)
		if err != nil {
			return nil, fmt.Errorf("pp.Add fails")
		}
	}
	// dAi->j(y)/dDi->j(y)
	a = a.Mul(d)
	// Ωi->j(y)
	v, err := p.evaluate(y)
	if err != nil {
		return nil, fmt.Errorf("p.evaluate fails")
	}
	// (1/dDi->j(y)) * Ωi->j(y)
	v = v.Mul(d)
	// return
	return &Delta{d: a, p: v}, nil
}
// evaluateDelta computes values used for membership witness batch update
// as described in section 4.1 of https://eprint.iacr.org/2020/777.pdf
// It returns Delta{d: dA(y)/dD(y), p: 1/dD(y) * <Gamma_y, Omega>}.
// An error is returned if y is among the deletions (dD(y) has no inverse).
func evaluateDelta(y Element, additions []Element, deletions []Element, coefficients []Coefficient) (*Delta, error) {
	// dD(y) = ∏ 1..m (yD_i - y), d = 1/dD(y)
	var err error
	d, err := dad(deletions, y)
	if err != nil {
		return nil, fmt.Errorf("dad fails on deletions")
	}
	d, err = d.Invert()
	if err != nil {
		return nil, fmt.Errorf("no inverse exists")
	}
	// dA(y) = ∏ 1..n (yA_i - y)
	a, err := dad(additions, y)
	if err != nil {
		return nil, fmt.Errorf("dad fails on additions")
	}
	// dA(y)/dD(y)
	a = a.Mul(d)
	// Create a PolynomialG1 from coefficients
	p := make(polynomialPoint, len(coefficients))
	for i := 0; i < len(coefficients); i++ {
		p[i] = coefficients[i]
	}
	// <Gamma_y, Omega>
	v, err := p.evaluate(y)
	if err != nil {
		return nil, fmt.Errorf("p.evaluate fails")
	}
	// 1/dD * <Gamma_y, Omega>
	v = v.Mul(d)
	return &Delta{d: a, p: v}, nil
}

View File

@ -0,0 +1,229 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// Test_Membership_Witness_New checks a witness can be built for a fresh
// accumulator and that both of its components are populated.
func Test_Membership_Witness_New(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	sk, _ := new(SecretKey).New(curve, seed[:])
	acc, _ := new(Accumulator).New(curve)
	element := curve.Scalar.New(2)
	wit, err := new(MembershipWitness).New(element, acc, sk)
	require.NoError(t, err)
	require.NotNil(t, wit.c)
	require.NotNil(t, wit.y)
}
// Test_Membership_Witness_Marshal round-trips a witness through
// MarshalBinary / UnmarshalBinary and compares both components.
func Test_Membership_Witness_Marshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	original := &MembershipWitness{
		curve.PointG1.Generator().Mul(curve.Scalar.New(10)),
		curve.Scalar.New(15),
	}
	encoded, err := original.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, encoded)
	decoded := &MembershipWitness{}
	require.NoError(t, decoded.UnmarshalBinary(encoded))
	require.True(t, original.c.Equal(decoded.c))
	require.Equal(t, 0, original.y.Cmp(decoded.y))
}
// Test_Membership builds an accumulator over seven elements, verifies a
// genuine witness passes, and confirms a forged witness and a forged
// accumulator both fail verification.
func Test_Membership(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// nm_witness_max works as well if set to value larger than 0 for this test.
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	require.False(t, acc.value.IsIdentity())
	require.True(t, acc.value.IsOnCurve())
	require.NotEqual(t, acc.value, curve.NewG1GeneratorPoint())
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	err = wit.Verify(pk, acc)
	require.NoError(t, err)
	// Test wrong cases, forge a wrong witness
	wrongWit := MembershipWitness{
		curve.PointG1.Identity(),
		curve.Scalar.One(),
	}
	err = wrongWit.Verify(pk, acc)
	require.Error(t, err)
	// Test wrong cases, forge a wrong accumulator
	wrongAcc := &Accumulator{
		curve.PointG1.Generator(),
	}
	err = wit.Verify(pk, wrongAcc)
	require.Error(t, err)
}
// Test_Membership_Batch_Update verifies a witness stays valid after the
// accumulator is batch-updated (two additions, three deletions) and the
// witness is updated with the returned coefficients.
func Test_Membership_Batch_Update(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// nm_witness_max works as well if set to value larger than 0 for this test.
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	additions := data[0:2]
	deletions := data[2:5]
	// Update the accumulator and then the witness with the same coefficients.
	_, coefficients, err := acc.Update(sk, additions, deletions)
	require.NoError(t, err)
	require.NotNil(t, coefficients)
	_, err = wit.BatchUpdate(additions, deletions, coefficients)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
}
// Test_Membership_Multi_Batch_Update verifies a witness stays valid after
// three separate accumulator updates (epochs) are applied at once via
// MultiBatchUpdate with the per-epoch additions, deletions and coefficients.
func Test_Membership_Multi_Batch_Update(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	element8 := curve.Scalar.Hash([]byte("10"))
	element9 := curve.Scalar.Hash([]byte("11"))
	element10 := curve.Scalar.Hash([]byte("12"))
	element11 := curve.Scalar.Hash([]byte("13"))
	element12 := curve.Scalar.Hash([]byte("14"))
	element13 := curve.Scalar.Hash([]byte("15"))
	element14 := curve.Scalar.Hash([]byte("16"))
	element15 := curve.Scalar.Hash([]byte("17"))
	element16 := curve.Scalar.Hash([]byte("18"))
	element17 := curve.Scalar.Hash([]byte("19"))
	element18 := curve.Scalar.Hash([]byte("20"))
	elements := []Element{
		element1,
		element2,
		element3,
		element4,
		element5,
		element6,
		element7,
		element8,
		element9,
		element10,
		element11,
		element12,
		element13,
		element14,
		element15,
		element16,
		element17,
		element18,
	}
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	// Epoch 1: two additions, three deletions.
	adds1 := data[0:2]
	dels1 := data[2:5]
	_, coeffs1, err := acc.Update(sk, adds1, dels1)
	require.NoError(t, err)
	require.NotNil(t, coeffs1)
	// Epoch 2: deletions only.
	dels2 := elements[8:10]
	_, coeffs2, err := acc.Update(sk, []Element{}, dels2)
	require.NoError(t, err)
	require.NotNil(t, coeffs2)
	// Epoch 3: deletions only.
	dels3 := elements[11:14]
	_, coeffs3, err := acc.Update(sk, []Element{}, dels3)
	require.NoError(t, err)
	require.NotNil(t, coeffs3)
	// Assemble the per-epoch inputs for the multi-batch witness update.
	a := make([][]Element, 3)
	a[0] = adds1
	a[1] = []Element{}
	a[2] = []Element{}
	d := make([][]Element, 3)
	d[0] = dels1
	d[1] = dels2
	d[2] = dels3
	c := make([][]Coefficient, 3)
	c[0] = coeffs1
	c[1] = coeffs2
	c[2] = coeffs3
	_, err = wit.MultiBatchUpdate(a, d, c)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
}

66
crypto/bip32/bip32.go Normal file
View File

@ -0,0 +1,66 @@
package bip32
import (
"crypto/hmac"
"crypto/sha512"
"encoding/binary"
"errors"
"math/big"
"github.com/btcsuite/btcd/btcec/v2"
)
// ComputePublicKey computes the public key of a child key given the extended public key, chain code, and index.
//
// This follows BIP32 public (non-hardened) child derivation:
// I = HMAC-SHA512(key = chainCode, data = serP(Kpar) || ser32(index)),
// child = point(parse256(IL)) + Kpar.
//
// Fixes over the previous revision:
//   - Hardened indices are rejected by checking the INDEX (bit 31 set, or
//     negative), not the chain code's high bit; hardened children cannot be
//     derived from a public key.
//   - The HMAC key uses the full 4-byte big-endian chain code instead of
//     truncating it to a single byte.
func ComputePublicKey(extPubKey []byte, chainCode uint32, index int) ([]byte, error) {
	// Hardened child keys (index >= 2^31) cannot be derived from a public key.
	if index < 0 || index >= 0x80000000 {
		return nil, errors.New("invalid index")
	}
	// Parse and re-serialize the parent public key in compressed form.
	pubKey, err := btcec.ParsePubKey(extPubKey)
	if err != nil {
		return nil, err
	}
	pubKeyBytes := pubKey.SerializeCompressed()
	// ser32(index): 4-byte big-endian serialization of the child index.
	indexBytes := make([]byte, 4)
	binary.BigEndian.PutUint32(indexBytes, uint32(index))
	// HMAC-SHA512 keyed with the full chain code (big-endian); the previous
	// code used []byte{byte(chainCode)}, discarding 24 of the 32 bits.
	chainCodeBytes := make([]byte, 4)
	binary.BigEndian.PutUint32(chainCodeBytes, chainCode)
	mac := hmac.New(sha512.New, chainCodeBytes)
	mac.Write(pubKeyBytes)
	mac.Write(indexBytes)
	I := mac.Sum(nil)
	// IL is the left 32 bytes of I, interpreted as a big-endian integer.
	IL := I[:32]
	ilNum := new(big.Int).SetBytes(IL)
	// BIP32: derivation is invalid when parse256(IL) >= n (curve order).
	curve := btcec.S256()
	if ilNum.Cmp(curve.N) >= 0 {
		return nil, errors.New("invalid child key")
	}
	// Child key = point(IL) + Kpar.
	ilx, ily := curve.ScalarBaseMult(IL)
	childX, childY := curve.Add(ilx, ily, pubKey.X(), pubKey.Y())
	lx := newBigIntFieldVal(childX)
	ly := newBigIntFieldVal(childY)
	// Serialize the child public key in compressed form.
	childPubKey := btcec.NewPublicKey(lx, ly)
	return childPubKey.SerializeCompressed(), nil
}
// newBigIntFieldVal creates a new field value from a big integer.
// val.Bytes() is big-endian with leading zeros trimmed; SetByteSlice consumes
// that directly. NOTE(review): assumes val is non-negative and already within
// the field's range — confirm against btcec.FieldVal.SetByteSlice semantics.
func newBigIntFieldVal(val *big.Int) *btcec.FieldVal {
	lx := new(btcec.FieldVal)
	lx.SetByteSlice(val.Bytes())
	return lx
}

View File

@ -0,0 +1,57 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package bulletproof
import (
"github.com/pkg/errors"
"golang.org/x/crypto/sha3"
"github.com/onsonr/hway/crypto/core/curves"
)
// generators contains a list of points to be used as generators for bulletproofs.
type generators []curves.Point

// ippGenerators holds generators necessary for an Inner Product Proof
// It includes a single u generator, and a list of generators divided in half to G and H
// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
type ippGenerators struct {
	G generators // first half of the generator vector
	H generators // second half of the generator vector
}
// getGeneratorPoints generates generators using HashToCurve with Shake256(domain) as input
// lenVector is the length of the scalars used for the Inner Product Proof
// getGeneratorPoints will return 2*lenVector + 1 total points, split between a single u generator
// and G and H lists of vectors per the IPP specification
// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
func getGeneratorPoints(lenVector int, domain []byte, curve curves.Curve) (*ippGenerators, error) {
	// Seed the extendable-output function with the domain separator.
	shake := sha3.NewShake256()
	if _, err := shake.Write(domain); err != nil {
		return nil, errors.Wrap(err, "getGeneratorPoints shake.Write")
	}
	// Derive each generator by hashing a fresh 64-byte XOF block to the curve.
	numPoints := lenVector * 2
	points := make([]curves.Point, 0, numPoints)
	for i := 0; i < numPoints; i++ {
		var digest [64]byte
		if _, err := shake.Read(digest[:]); err != nil {
			return nil, errors.Wrap(err, "getGeneratorPoints shake.Read")
		}
		points = append(points, curve.Point.Hash(digest[:]))
	}
	// Get G and H by splitting points in half
	G, H, err := splitPointVector(points)
	if err != nil {
		return nil, errors.Wrap(err, "getGeneratorPoints splitPointVector")
	}
	return &ippGenerators{G: G, H: H}, nil
}

View File

@ -0,0 +1,61 @@
package bulletproof
import (
"testing"
"github.com/stretchr/testify/require"
"golang.org/x/crypto/sha3"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestGeneratorsHappyPath checks that generator derivation produces the
// requested number of distinct G and H points.
func TestGeneratorsHappyPath(t *testing.T) {
	curve := curves.ED25519()
	gs, err := getGeneratorPoints(10, []byte("test"), *curve)
	// Check the error BEFORE dereferencing gs; the previous version
	// dereferenced a potentially nil pointer ahead of require.NoError.
	require.NoError(t, err)
	gsConcatenated := concatIPPGenerators(*gs)
	require.Len(t, gs.G, 10)
	require.Len(t, gs.H, 10)
	require.True(t, noDuplicates(gsConcatenated))
}
// TestGeneratorsUniquePerDomain checks that different domain separators
// yield disjoint generator sets.
func TestGeneratorsUniquePerDomain(t *testing.T) {
	curve := curves.ED25519()
	// Check each error BEFORE dereferencing the result; the previous version
	// dereferenced potentially nil pointers ahead of require.NoError.
	gs1, err := getGeneratorPoints(10, []byte("test"), *curve)
	require.NoError(t, err)
	gs1Concatenated := concatIPPGenerators(*gs1)
	gs2, err := getGeneratorPoints(10, []byte("test2"), *curve)
	require.NoError(t, err)
	gs2Concatenated := concatIPPGenerators(*gs2)
	require.True(t, areDisjoint(gs1Concatenated, gs2Concatenated))
}
// noDuplicates reports whether every generator in gs is distinct, comparing
// points by the SHA3-256 digest of their compressed encoding.
func noDuplicates(gs generators) bool {
	seen := make(map[[32]byte]struct{}, len(gs))
	for _, g := range gs {
		digest := sha3.Sum256(g.ToAffineCompressed())
		if _, dup := seen[digest]; dup {
			return false
		}
		seen[digest] = struct{}{}
	}
	return true
}
// areDisjoint reports whether no point of gs1 equals any point of gs2.
func areDisjoint(gs1, gs2 generators) bool {
	for _, candidate := range gs2 {
		for _, existing := range gs1 {
			if existing.Equal(candidate) {
				return false
			}
		}
	}
	return true
}
// concatIPPGenerators returns the G and H generator lists joined into one slice.
func concatIPPGenerators(ippGens ippGenerators) generators {
	combined := make(generators, 0, len(ippGens.G)+len(ippGens.H))
	combined = append(combined, ippGens.G...)
	combined = append(combined, ippGens.H...)
	return combined
}

181
crypto/bulletproof/helpers.go Executable file
View File

@ -0,0 +1,181 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package bulletproof
import (
"github.com/pkg/errors"
"github.com/onsonr/hway/crypto/core/curves"
)
// innerProduct takes two lists of scalars (a, b) and performs the dot product returning a single scalar.
// Both vectors must be non-empty and of equal length.
func innerProduct(a, b []curves.Scalar) (curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	if len(a) < 1 {
		return nil, errors.New("length of vectors must be at least one")
	}
	// Accumulate sum_i a[i]*b[i], starting from zero of the proper curve.
	sum := a[0].Zero()
	for i := range a {
		sum = a[i].MulAdd(b[i], sum)
	}
	return sum, nil
}
// splitPointVector takes a vector of points, splits it in half returning each half.
// The input must be non-empty and of even length.
func splitPointVector(points []curves.Point) ([]curves.Point, []curves.Point, error) {
	if len(points) < 1 {
		return nil, nil, errors.New("length of points must be at least one")
	}
	if len(points)%2 != 0 {
		return nil, nil, errors.New("length of points must be even")
	}
	mid := len(points) / 2
	return points[:mid], points[mid:], nil
}
// splitScalarVector takes a vector of scalars, splits it in half returning each half.
// The input must be non-empty and of even length.
func splitScalarVector(scalars []curves.Scalar) ([]curves.Scalar, []curves.Scalar, error) {
	if len(scalars) < 1 {
		return nil, nil, errors.New("length of scalars must be at least one")
	}
	if len(scalars)%2 != 0 {
		return nil, nil, errors.New("length of scalars must be even")
	}
	mid := len(scalars) / 2
	return scalars[:mid], scalars[mid:], nil
}
// multiplyScalarToPointVector takes a single scalar and a list of points, multiplies each point by scalar.
func multiplyScalarToPointVector(x curves.Scalar, g []curves.Point) []curves.Point {
	scaled := make([]curves.Point, len(g))
	for i := range g {
		scaled[i] = g[i].Mul(x)
	}
	return scaled
}
// multiplyScalarToScalarVector takes a single scalar (x) and a list of scalars (a), multiplies each scalar in the vector by the scalar.
func multiplyScalarToScalarVector(x curves.Scalar, a []curves.Scalar) []curves.Scalar {
	scaled := make([]curves.Scalar, len(a))
	for i := range a {
		scaled[i] = a[i].Mul(x)
	}
	return scaled
}
// multiplyPairwisePointVectors takes two lists of points (g, h) and performs a pairwise multiplication returning a list of points.
// (In additive notation each output element is g[i] + h[i].)
func multiplyPairwisePointVectors(g, h []curves.Point) ([]curves.Point, error) {
	if len(g) != len(h) {
		return nil, errors.New("length of point vectors must be the same")
	}
	combined := make([]curves.Point, len(g))
	for i := range g {
		combined[i] = g[i].Add(h[i])
	}
	return combined, nil
}
// multiplyPairwiseScalarVectors takes two lists of points (a, b) and performs a pairwise multiplication returning a list of scalars.
func multiplyPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of point vectors must be the same")
	}
	combined := make([]curves.Scalar, len(a))
	for i := range a {
		combined[i] = a[i].Mul(b[i])
	}
	return combined, nil
}
// addPairwiseScalarVectors takes two lists of scalars (a, b) and performs a pairwise addition returning a list of scalars.
func addPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	out := make([]curves.Scalar, len(a))
	for i := range a {
		out[i] = a[i].Add(b[i])
	}
	return out, nil
}
// subtractPairwiseScalarVectors takes two lists of scalars (a, b) and performs a pairwise subtraction returning a list of scalars.
func subtractPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	out := make([]curves.Scalar, len(a))
	for i := range a {
		out[i] = a[i].Sub(b[i])
	}
	return out, nil
}
// invertScalars takes a list of scalars then returns a list with each element inverted.
// An error is returned if any element is not invertible (e.g. zero).
func invertScalars(xs []curves.Scalar) ([]curves.Scalar, error) {
	inverted := make([]curves.Scalar, len(xs))
	for i := range xs {
		inv, err := xs[i].Invert()
		if err != nil {
			return nil, errors.Wrap(err, "bulletproof helpers invertx")
		}
		inverted[i] = inv
	}
	return inverted, nil
}
// isPowerOfTwo returns whether a number i is a (positive) power of two.
// The bare bit-trick i&(i-1)==0 incorrectly classifies 0 (and some negative
// values) as powers of two, so the positivity guard is required.
func isPowerOfTwo(i int) bool {
	return i > 0 && i&(i-1) == 0
}
// get2nVector returns a scalar vector 2^n such that [1, 2, 4, ... 2^(n-1)]
// See k^n and 2^n definitions on pg 12 of https://eprint.iacr.org/2017/1066.pdf
// A non-positive length yields an empty vector; previously the unconditional
// write to index 0 panicked on length 0.
func get2nVector(length int, curve curves.Curve) []curves.Scalar {
	if length < 1 {
		return []curves.Scalar{}
	}
	vector2n := make([]curves.Scalar, length)
	vector2n[0] = curve.Scalar.One()
	for i := 1; i < length; i++ {
		// Each element doubles the previous: 1, 2, 4, ...
		vector2n[i] = vector2n[i-1].Double()
	}
	return vector2n
}
// get1nVector returns a vector of the given length with every element set to one.
func get1nVector(length int, curve curves.Curve) []curves.Scalar {
	ones := make([]curves.Scalar, length)
	for i := range ones {
		ones[i] = curve.Scalar.One()
	}
	return ones
}
// getknVector returns the vector [1, k, k^2, ... k^(length-1)].
// See k^n definition on pg 12 of https://eprint.iacr.org/2017/1066.pdf
// Lengths 0 and 1 are handled; previously the unconditional write to index 1
// panicked for length < 2. Starting the loop at index 1 is equivalent to the
// old explicit vectorkn[1] = k, since One().Mul(k) == k.
func getknVector(k curves.Scalar, length int, curve curves.Curve) []curves.Scalar {
	vectorkn := make([]curves.Scalar, length)
	if length == 0 {
		return vectorkn
	}
	vectorkn[0] = curve.Scalar.One()
	for i := 1; i < length; i++ {
		vectorkn[i] = vectorkn[i-1].Mul(k)
	}
	return vectorkn
}

View File

@ -0,0 +1,85 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestInnerProductHappyPath checks that innerProduct succeeds on equal-length vectors.
func TestInnerProductHappyPath(t *testing.T) {
	curve := curves.ED25519()
	lhs := randScalarVec(3, *curve)
	rhs := randScalarVec(3, *curve)
	_, err := innerProduct(lhs, rhs)
	require.NoError(t, err)
}
// TestInnerProductMismatchedLengths checks that innerProduct rejects vectors of different lengths.
func TestInnerProductMismatchedLengths(t *testing.T) {
	curve := curves.ED25519()
	shorter := randScalarVec(3, *curve)
	longer := randScalarVec(4, *curve)
	_, err := innerProduct(shorter, longer)
	require.Error(t, err)
}
// TestInnerProductEmptyVector checks that innerProduct rejects empty inputs.
func TestInnerProductEmptyVector(t *testing.T) {
	curve := curves.ED25519()
	empty1 := randScalarVec(0, *curve)
	empty2 := randScalarVec(0, *curve)
	_, err := innerProduct(empty1, empty2)
	require.Error(t, err)
}
// TestInnerProductOut checks the inner product value against a manual computation.
func TestInnerProductOut(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(2, *curve)
	b := randScalarVec(2, *curve)
	got, err := innerProduct(a, b)
	require.NoError(t, err)
	// Calculate manually a0*b0 + a1*b1
	want := a[0].Mul(b[0]).Add(a[1].Mul(b[1]))
	require.Equal(t, got, want)
}
// TestSplitListofPointsHappyPath checks an even-length point vector is split into equal halves.
func TestSplitListofPointsHappyPath(t *testing.T) {
	curve := curves.ED25519()
	points := randPointVec(10, *curve)
	lo, hi, err := splitPointVector(points)
	require.NoError(t, err)
	require.Len(t, lo, 5)
	require.Len(t, hi, 5)
}
// TestSplitListofPointsOddLength checks an odd-length point vector is rejected.
func TestSplitListofPointsOddLength(t *testing.T) {
	curve := curves.ED25519()
	odd := randPointVec(11, *curve)
	_, _, err := splitPointVector(odd)
	require.Error(t, err)
}
// TestSplitListofPointsZeroLength checks an empty point vector is rejected.
func TestSplitListofPointsZeroLength(t *testing.T) {
	curve := curves.ED25519()
	empty := randPointVec(0, *curve)
	_, _, err := splitPointVector(empty)
	require.Error(t, err)
}
// randScalarVec returns a vector of `length` random scalars on the given curve.
func randScalarVec(length int, curve curves.Curve) []curves.Scalar {
	vec := make([]curves.Scalar, length)
	for i := range vec {
		vec[i] = curve.Scalar.Random(crand.Reader)
	}
	return vec
}
// randPointVec returns a vector of `length` random points on the given curve.
func randPointVec(length int, curve curves.Curve) []curves.Point {
	vec := make([]curves.Point, length)
	for i := range vec {
		vec[i] = curve.Point.Random(crand.Reader)
	}
	return vec
}

396
crypto/bulletproof/ipp_prover.go Executable file
View File

@ -0,0 +1,396 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package bulletproof implements the zero knowledge protocol bulletproofs as defined in https://eprint.iacr.org/2017/1066.pdf
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/onsonr/hway/crypto/core/curves"
)
// InnerProductProver is the struct used to create InnerProductProofs
// It specifies which curve to use and holds precomputed generators
// See NewInnerProductProver() for prover initialization.
type InnerProductProver struct {
	// curve is the elliptic curve over which proofs are constructed.
	curve curves.Curve
	// generators holds the precomputed G and H generator vectors
	// (see getGeneratorPoints, used by NewInnerProductProver).
	generators ippGenerators
}
// InnerProductProof contains necessary output for the inner product proof
// a and b are the final input vectors of scalars, they should be of length 1
// Ls and Rs are calculated per recursion of the IPP and are necessary for verification
// See section 3.1 on pg 15 of https://eprint.iacr.org/2017/1066.pdf
type InnerProductProof struct {
	// a, b are the single remaining scalars after the recursion bottoms out.
	a, b curves.Scalar
	// capLs, capRs hold one L and one R commitment per recursion round.
	capLs, capRs []curves.Point
	// curve is used for (un)marshaling scalars and points.
	curve *curves.Curve
}
// ippRecursion is the same as IPP but tracks recursive a', b', g', h' and Ls and Rs
// It should only be used internally by InnerProductProver.Prove()
// See L35 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
type ippRecursion struct {
	// a, b are the scalar vectors for the current recursion level.
	a, b []curves.Scalar
	// c is the inner product <a, b> at this level.
	c curves.Scalar
	// capLs, capRs accumulate the L and R commitments emitted by each round.
	capLs, capRs []curves.Point
	// g, h are the generator vectors for the current recursion level.
	g, h []curves.Point
	// u is the blinding point; capP is the current commitment P.
	u, capP curves.Point
	// transcript drives the Fiat-Shamir challenge derivation (see calcx).
	transcript *merlin.Transcript
}
// NewInnerProductProver initializes a new prover
// It uses the specified domain to generate generators for vectors of at most maxVectorLength
// A prover can be used to construct inner product proofs for vectors of length less than or equal to maxVectorLength
// A prover is defined by an explicit curve.
func NewInnerProductProver(maxVectorLength int, domain []byte, curve curves.Curve) (*InnerProductProver, error) {
	gens, err := getGeneratorPoints(maxVectorLength, domain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getGenerators")
	}
	prover := &InnerProductProver{
		curve:      curve,
		generators: *gens,
	}
	return prover, nil
}
// NewInnerProductProof initializes a new InnerProductProof for a specified curve
// This should be used in tandem with UnmarshalBinary() to convert a marshaled proof into the struct.
func NewInnerProductProof(curve *curves.Curve) *InnerProductProof {
	// capLs/capRs start nil; UnmarshalBinary populates them.
	return &InnerProductProof{
		a:     curve.NewScalar(),
		b:     curve.NewScalar(),
		capLs: nil,
		capRs: nil,
		curve: curve,
	}
}
// rangeToIPP takes the output of a range proof and converts it into an inner product proof
// See section 4.2 on pg 20
// The conversion specifies generators to use (g and hPrime), as well as the two vectors l, r of which the inner product is tHat
// Additionally, note that the P used for the IPP is in fact P*h^-mu from the range proof.
//
// Parameters:
//   - proofG, proofH: generator vectors for the IPP.
//   - l, r: witness vectors whose inner product is tHat.
//   - capPhmuinv: P * h^-mu from the range proof (per the note above).
//   - u: blinding point for the inner product term.
//   - transcript: Fiat-Shamir transcript, shared with the range proof.
func (prover *InnerProductProver) rangeToIPP(proofG, proofH []curves.Point, l, r []curves.Scalar, tHat curves.Scalar, capPhmuinv, u curves.Point, transcript *merlin.Transcript) (*InnerProductProof, error) {
	// Note that P as a witness is only g^l * h^r
	// P needs to be in the form of g^l * h^r * u^<l,r>
	// Calculate the final P including the u^<l,r> term
	utHat := u.Mul(tHat)
	capP := capPhmuinv.Add(utHat)
	// Use params to prove inner product
	recursionParams := &ippRecursion{
		a:          l,
		b:          r,
		capLs:      []curves.Point{},
		capRs:      []curves.Point{},
		c:          tHat,
		g:          proofG,
		h:          proofH,
		capP:       capP,
		u:          u,
		transcript: transcript,
	}
	return prover.proveRecursive(recursionParams)
}
// getP returns the initial P value given two scalars a,b and point u
// This method should only be used for testing
// See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf
//
// Returns an error if len(a) is not a power of two, the prover's generator
// vectors differ in length, or len(a) != len(b) (reported by innerProduct).
func (prover *InnerProductProver) getP(a, b []curves.Scalar, u curves.Point) (curves.Point, error) {
	// Vectors must have length power of two
	if !isPowerOfTwo(len(a)) {
		return nil, errors.New("ipp vector length must be power of two")
	}
	// Generator vectors must be same length
	if len(prover.generators.G) != len(prover.generators.H) {
		return nil, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Inner product requires len(a) == len(b) else error is returned
	c, err := innerProduct(a, b)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getInnerProduct")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:len(a)]
	proofH := prover.generators.H[0:len(b)]
	// initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf)
	ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a)
	hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b)
	uadotb := u.Mul(c)
	capP := ga.Add(hb).Add(uadotb)
	return capP, nil
}
// Prove executes the prover protocol on pg 16 of https://eprint.iacr.org/2017/1066.pdf
// It generates an inner product proof for vectors a and b, using u to blind the inner product in P
// A transcript is used for the Fiat Shamir heuristic.
//
// Requirements: len(a) must be a power of two, equal to len(b), and at most
// the number of precomputed generators; otherwise an error is returned.
func (prover *InnerProductProver) Prove(a, b []curves.Scalar, u curves.Point, transcript *merlin.Transcript) (*InnerProductProof, error) {
	// Vectors must have length power of two
	if !isPowerOfTwo(len(a)) {
		return nil, errors.New("ipp vector length must be power of two")
	}
	// Generator vectors must be same length
	if len(prover.generators.G) != len(prover.generators.H) {
		return nil, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Inner product requires len(a) == len(b) else error is returned
	c, err := innerProduct(a, b)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getInnerProduct")
	}
	// Length of vectors must be less than the number of generators generated
	if len(a) > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:len(a)]
	proofH := prover.generators.H[0:len(b)]
	// initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf)
	ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a)
	hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b)
	uadotb := u.Mul(c)
	capP := ga.Add(hb).Add(uadotb)
	// Seed the recursion with the full vectors and empty L/R accumulators.
	recursionParams := &ippRecursion{
		a:          a,
		b:          b,
		capLs:      []curves.Point{},
		capRs:      []curves.Point{},
		c:          c,
		g:          proofG,
		h:          proofH,
		capP:       capP,
		u:          u,
		transcript: transcript,
	}
	return prover.proveRecursive(recursionParams)
}
// proveRecursive executes the recursion on pg 16 of https://eprint.iacr.org/2017/1066.pdf
// Each round halves the vectors (emitting one L and one R commitment) until a
// single (a, b) pair remains, which forms the proof together with the
// accumulated Ls and Rs. The challenge x for each round comes from the
// transcript via calcx (Fiat-Shamir), so prover and verifier derive the same
// values without interaction.
func (prover *InnerProductProver) proveRecursive(recursionParams *ippRecursion) (*InnerProductProof, error) {
	// length checks
	if len(recursionParams.a) != len(recursionParams.b) {
		return nil, errors.New("ipp proveRecursive a and b different lengths")
	}
	if len(recursionParams.g) != len(recursionParams.h) {
		return nil, errors.New("ipp proveRecursive g and h different lengths")
	}
	if len(recursionParams.a) != len(recursionParams.g) {
		return nil, errors.New("ipp proveRecursive scalar and point vectors different lengths")
	}
	// Base case (L14, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	if len(recursionParams.a) == 1 {
		proof := &InnerProductProof{
			a:     recursionParams.a[0],
			b:     recursionParams.b[0],
			capLs: recursionParams.capLs,
			capRs: recursionParams.capRs,
			curve: &prover.curve,
		}
		return proof, nil
	}
	// Split current state into low (first half) vs high (second half) vectors
	aLo, aHi, err := splitScalarVector(recursionParams.a)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitScalarVector")
	}
	bLo, bHi, err := splitScalarVector(recursionParams.b)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitScalarVector")
	}
	gLo, gHi, err := splitPointVector(recursionParams.g)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitPointVector")
	}
	hLo, hHi, err := splitPointVector(recursionParams.h)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitPointVector")
	}
	// c_l, c_r (L21,22, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	cL, err := innerProduct(aLo, bHi)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	cR, err := innerProduct(aHi, bLo)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	// L, R (L23,24, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	lga := prover.curve.Point.SumOfProducts(gHi, aLo)
	lhb := prover.curve.Point.SumOfProducts(hLo, bHi)
	ucL := recursionParams.u.Mul(cL)
	capL := lga.Add(lhb).Add(ucL)
	rga := prover.curve.Point.SumOfProducts(gLo, aHi)
	rhb := prover.curve.Point.SumOfProducts(hHi, bLo)
	ucR := recursionParams.u.Mul(cR)
	capR := rga.Add(rhb).Add(ucR)
	// Add L,R for verifier to use to calculate final g, h
	newL := recursionParams.capLs
	newL = append(newL, capL)
	newR := recursionParams.capRs
	newR = append(newR, capR)
	// Get x from L, R for non-interactive (See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf)
	// Note this replaces the interactive model, i.e. L36-28 of pg16 of https://eprint.iacr.org/2017/1066.pdf
	x, err := prover.calcx(capL, capR, recursionParams.transcript)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams calcx")
	}
	// Calculate recursive inputs
	xInv, err := x.Invert()
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams x.Invert")
	}
	// g', h' (L29,30, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	gLoxInverse := multiplyScalarToPointVector(xInv, gLo)
	gHix := multiplyScalarToPointVector(x, gHi)
	gPrime, err := multiplyPairwisePointVectors(gLoxInverse, gHix)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors")
	}
	hLox := multiplyScalarToPointVector(x, hLo)
	hHixInv := multiplyScalarToPointVector(xInv, hHi)
	hPrime, err := multiplyPairwisePointVectors(hLox, hHixInv)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors")
	}
	// P' (L31, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	xSquare := x.Square()
	xInvSquare := xInv.Square()
	LxSquare := capL.Mul(xSquare)
	RxInvSquare := capR.Mul(xInvSquare)
	PPrime := LxSquare.Add(recursionParams.capP).Add(RxInvSquare)
	// a', b' (L33, 34, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	aLox := multiplyScalarToScalarVector(x, aLo)
	aHixIn := multiplyScalarToScalarVector(xInv, aHi)
	aPrime, err := addPairwiseScalarVectors(aLox, aHixIn)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors")
	}
	bLoxInv := multiplyScalarToScalarVector(xInv, bLo)
	bHix := multiplyScalarToScalarVector(x, bHi)
	bPrime, err := addPairwiseScalarVectors(bLoxInv, bHix)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors")
	}
	// c'
	cPrime, err := innerProduct(aPrime, bPrime)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	// Make recursive call (L35, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	recursiveIPP := &ippRecursion{
		a:          aPrime,
		b:          bPrime,
		capLs:      newL,
		capRs:      newR,
		c:          cPrime,
		g:          gPrime,
		h:          hPrime,
		capP:       PPrime,
		u:          recursionParams.u,
		transcript: recursionParams.transcript,
	}
	out, err := prover.proveRecursive(recursiveIPP)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams proveRecursive")
	}
	return out, nil
}
// calcx uses a merlin transcript for Fiat Shamir
// For each recursion, it takes the current state of the transcript and appends the newly calculated L and R values
// A new scalar is then read from the transcript
// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf
func (prover *InnerProductProver) calcx(capL, capR curves.Point, transcript *merlin.Transcript) (curves.Scalar, error) {
	// Commit the newest L and R before squeezing the challenge.
	transcript.AppendMessage([]byte("addRecursiveL"), capL.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addRecursiveR"), capR.ToAffineUncompressed())
	// Squeeze 64 bytes and reduce them to a scalar.
	wide := transcript.ExtractBytes([]byte("getx"), 64)
	x, err := prover.curve.NewScalar().SetBytesWide(wide)
	if err != nil {
		return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide")
	}
	return x, nil
}
// MarshalBinary takes an inner product proof and marshals into bytes.
// Layout: a || b followed by compressed (L_i, R_i) pairs in round order.
func (proof *InnerProductProof) MarshalBinary() []byte {
	out := append([]byte{}, proof.a.Bytes()...)
	out = append(out, proof.b.Bytes()...)
	for i := range proof.capLs {
		out = append(out, proof.capLs[i].ToAffineCompressed()...)
		out = append(out, proof.capRs[i].ToAffineCompressed()...)
	}
	return out
}
// UnmarshalBinary takes bytes of a marshaled proof and writes them into an inner product proof
// The inner product proof used should be from the output of NewInnerProductProof().
// Expected layout: a || b followed by zero or more compressed (L_i, R_i) point pairs.
// Input of the wrong length is rejected with an error instead of triggering a
// slice-bounds panic as the previous unchecked indexing did.
func (proof *InnerProductProof) UnmarshalBinary(data []byte) error {
	scalarLen := len(proof.curve.NewScalar().Bytes())
	pointLen := len(proof.curve.NewGeneratorPoint().ToAffineCompressed())
	// Validate total length up front: two scalars plus whole (L, R) pairs.
	if len(data) < 2*scalarLen {
		return errors.New("innerProductProof UnmarshalBinary data too short")
	}
	if (len(data)-2*scalarLen)%(2*pointLen) != 0 {
		return errors.New("innerProductProof UnmarshalBinary invalid data length")
	}
	ptr := 0
	// Get scalars
	a, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("innerProductProof UnmarshalBinary SetBytes")
	}
	proof.a = a
	ptr += scalarLen
	b, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("innerProductProof UnmarshalBinary SetBytes")
	}
	proof.b = b
	ptr += scalarLen
	// Get points
	var capLs, capRs []curves.Point //nolint:prealloc // pointer arithmetic makes it too unreadable.
	for ptr < len(data) {
		capLElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
		if err != nil {
			return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed")
		}
		capLs = append(capLs, capLElem)
		ptr += pointLen
		capRElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
		if err != nil {
			return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed")
		}
		capRs = append(capRs, capRElem)
		ptr += pointLen
	}
	proof.capLs = capLs
	proof.capRs = capRs
	return nil
}

View File

@ -0,0 +1,99 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestIPPHappyPath proves over length-8 vectors and expects log2(8) = 3 rounds of L/R values.
func TestIPPHappyPath(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	left := randScalarVec(8, *curve)
	right := randScalarVec(8, *curve)
	blind := curve.Point.Random(crand.Reader)
	proof, err := prover.Prove(left, right, blind, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.Equal(t, 3, len(proof.capLs))
	require.Equal(t, 3, len(proof.capRs))
}
// TestIPPMismatchedVectors checks that Prove rejects vectors of different lengths.
func TestIPPMismatchedVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	shorter := randScalarVec(4, *curve)
	longer := randScalarVec(8, *curve)
	blind := curve.Point.Random(crand.Reader)
	_, err = prover.Prove(shorter, longer, blind, merlin.NewTranscript("test"))
	require.Error(t, err)
}
// TestIPPNonPowerOfTwoLengthVectors checks that Prove rejects vectors whose length is not a power of two.
func TestIPPNonPowerOfTwoLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	left := randScalarVec(3, *curve)
	right := randScalarVec(3, *curve)
	blind := curve.Point.Random(crand.Reader)
	_, err = prover.Prove(left, right, blind, merlin.NewTranscript("test"))
	require.Error(t, err)
}
// TestIPPZeroLengthVectors checks that Prove rejects empty vectors.
func TestIPPZeroLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	left := randScalarVec(0, *curve)
	right := randScalarVec(0, *curve)
	blind := curve.Point.Random(crand.Reader)
	_, err = prover.Prove(left, right, blind, merlin.NewTranscript("test"))
	require.Error(t, err)
}
// TestIPPGreaterThanMaxLengthVectors checks that Prove rejects vectors longer than maxVectorLength.
func TestIPPGreaterThanMaxLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	left := randScalarVec(16, *curve)
	right := randScalarVec(16, *curve)
	blind := curve.Point.Random(crand.Reader)
	_, err = prover.Prove(left, right, blind, merlin.NewTranscript("test"))
	require.Error(t, err)
}
// TestIPPMarshal checks that a proof survives a MarshalBinary/UnmarshalBinary round trip.
func TestIPPMarshal(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	left := randScalarVec(8, *curve)
	right := randScalarVec(8, *curve)
	blind := curve.Point.Random(crand.Reader)
	original, err := prover.Prove(left, right, blind, merlin.NewTranscript("test"))
	require.NoError(t, err)
	// Round trip through the binary encoding.
	encoded := original.MarshalBinary()
	decoded := NewInnerProductProof(curve)
	require.NoError(t, decoded.UnmarshalBinary(encoded))
	require.Zero(t, original.a.Cmp(decoded.a))
	require.Zero(t, original.b.Cmp(decoded.b))
	for i := range original.capLs {
		require.True(t, original.capLs[i].Equal(decoded.capLs[i]))
		require.True(t, original.capRs[i].Equal(decoded.capRs[i]))
	}
}

View File

@ -0,0 +1,209 @@
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/onsonr/hway/crypto/core/curves"
)
// InnerProductVerifier is the struct used to verify inner product proofs
// It specifies which curve to use and holds precomputed generators
// See NewInnerProductProver() for prover initialization.
type InnerProductVerifier struct {
	// curve is the elliptic curve over which proofs are verified; it must
	// match the prover's curve.
	curve curves.Curve
	// generators holds the precomputed G and H generator vectors; they must
	// be derived from the same domain as the prover's.
	generators ippGenerators
}
// NewInnerProductVerifier initializes a new verifier
// It uses the specified domain to generate generators for vectors of at most maxVectorLength
// A verifier can be used to verify inner product proofs for vectors of length less than or equal to maxVectorLength
// A verifier is defined by an explicit curve.
func NewInnerProductVerifier(maxVectorLength int, domain []byte, curve curves.Curve) (*InnerProductVerifier, error) {
	gens, err := getGeneratorPoints(maxVectorLength, domain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getGenerators")
	}
	verifier := &InnerProductVerifier{
		curve:      curve,
		generators: *gens,
	}
	return verifier, nil
}
// Verify verifies the given proof inputs
// It implements the final comparison of section 3.1 on pg17 of https://eprint.iacr.org/2017/1066.pdf
//
// capP is the commitment being verified, u the blinding point. The transcript
// must be in the same state as the prover's was at proving time so that the
// re-derived Fiat-Shamir challenges match.
func (verifier *InnerProductVerifier) Verify(capP, u curves.Point, proof *InnerProductProof, transcript *merlin.Transcript) (bool, error) {
	if len(proof.capLs) != len(proof.capRs) {
		return false, errors.New("ipp capLs and capRs must be same length")
	}
	// Generator vectors must be same length
	if len(verifier.generators.G) != len(verifier.generators.H) {
		return false, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Get generators for each elem in a, b and one more for u
	// len(Ls) = log n, therefore can just exponentiate
	n := 1 << len(proof.capLs)
	// Length of vectors must be less than the number of generators generated
	if n > len(verifier.generators.G) {
		return false, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := verifier.generators.G[0:n]
	proofH := verifier.generators.H[0:n]
	// Re-derive the per-round challenges x_i from the transcript.
	xs, err := getxs(transcript, proof.capLs, proof.capRs, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "verifier getxs")
	}
	// O(n) computation of the s vector (see getsNew).
	s, err := verifier.getsNew(xs, n)
	if err != nil {
		return false, errors.Wrap(err, "verifier getss")
	}
	lhs, err := verifier.getLHS(u, proof, proofG, proofH, s)
	if err != nil {
		return false, errors.Wrap(err, "verify getLHS")
	}
	rhs, err := verifier.getRHS(capP, proof, xs)
	if err != nil {
		return false, errors.Wrap(err, "verify getRHS")
	}
	return lhs.Equal(rhs), nil
}
// VerifyFromRangeProof verifies an inner product proof produced as part of a
// range proof (see rangeToIPP). capPhmuinv is P*h^-mu from the range proof and
// tHat the claimed inner product; the commitment verified is capPhmuinv * u^tHat.
// It implements the final comparison of section 3.1 on pg17 of https://eprint.iacr.org/2017/1066.pdf
func (verifier *InnerProductVerifier) VerifyFromRangeProof(proofG, proofH []curves.Point, capPhmuinv, u curves.Point, tHat curves.Scalar, proof *InnerProductProof, transcript *merlin.Transcript) (bool, error) {
	// Get generators for each elem in a, b and one more for u
	// len(Ls) = log n, therefore can just exponentiate
	n := 1 << len(proof.capLs)
	// Re-derive the per-round challenges x_i from the transcript.
	xs, err := getxs(transcript, proof.capLs, proof.capRs, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "verifier getxs")
	}
	s, err := verifier.gets(xs, n)
	if err != nil {
		return false, errors.Wrap(err, "verifier getss")
	}
	lhs, err := verifier.getLHS(u, proof, proofG, proofH, s)
	if err != nil {
		return false, errors.Wrap(err, "verify getLHS")
	}
	// Fold the u^tHat term into the commitment, mirroring rangeToIPP.
	utHat := u.Mul(tHat)
	capP := capPhmuinv.Add(utHat)
	rhs, err := verifier.getRHS(capP, proof, xs)
	if err != nil {
		return false, errors.Wrap(err, "verify getRHS")
	}
	return lhs.Equal(rhs), nil
}
// getRHS gets the right hand side of the final comparison of section 3.1 on pg17:
// P multiplied by each L_j^(x_j^2) and R_j^(x_j^-2).
func (*InnerProductVerifier) getRHS(capP curves.Point, proof *InnerProductProof, xs []curves.Scalar) (curves.Point, error) {
	acc := capP
	for j := range proof.capLs {
		xjSquare := xs[j].Square()
		xjSquareInv, err := xjSquare.Invert()
		if err != nil {
			return nil, errors.Wrap(err, "verify invert")
		}
		acc = acc.Add(proof.capLs[j].Mul(xjSquare)).Add(proof.capRs[j].Mul(xjSquareInv))
	}
	return acc, nil
}
// getLHS gets the left hand side of the final comparison of section 3.1 on pg17:
// g^(a*s) * h^(b*s^-1) * u^(a*b).
func (verifier *InnerProductVerifier) getLHS(u curves.Point, proof *InnerProductProof, g, h []curves.Point, s []curves.Scalar) (curves.Point, error) {
	sInv, err := invertScalars(s)
	if err != nil {
		return nil, errors.Wrap(err, "verify invertScalars")
	}
	// g^(a*s)
	gas := verifier.curve.Point.SumOfProducts(g, multiplyScalarToScalarVector(proof.a, s))
	// h^(b*s^-1)
	hbsInv := verifier.curve.Point.SumOfProducts(h, multiplyScalarToScalarVector(proof.b, sInv))
	// u^(a*b)
	uab := u.Mul(proof.a.Mul(proof.b))
	// Combine all three terms.
	return gas.Add(hbsInv).Add(uab), nil
}
// getxs calculates the x values from Ls and Rs
// Note that each x is read from the transcript, then the L and R at a certain index are written to the transcript
// This mirrors the reading of xs and writing of Ls and Rs in the prover.
func getxs(transcript *merlin.Transcript, capLs, capRs []curves.Point, curve curves.Curve) ([]curves.Scalar, error) {
	xs := make([]curves.Scalar, 0, len(capLs))
	for i := range capLs {
		// Commit this round's L and R exactly as the prover did in calcx.
		transcript.AppendMessage([]byte("addRecursiveL"), capLs[i].ToAffineUncompressed())
		transcript.AppendMessage([]byte("addRecursiveR"), capRs[i].ToAffineUncompressed())
		// Squeeze 64 bytes and reduce them to a scalar.
		wide := transcript.ExtractBytes([]byte("getx"), 64)
		x, err := curve.NewScalar().SetBytesWide(wide)
		if err != nil {
			return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide")
		}
		xs = append(xs, x)
	}
	return xs, nil
}
// gets calculates the vector s of values used for verification
// See the second expression of section 3.1 on pg15
// For each index i, s_i is a product over rounds j of either x_j or x_j^-1,
// selected by bit (len(xs)-j-1) of i: bit set -> x_j, bit clear -> x_j^-1.
// This is the O(n log n) reference implementation; getsNew is the O(n) variant.
// nolint
func (verifier *InnerProductVerifier) gets(xs []curves.Scalar, n int) ([]curves.Scalar, error) {
	ss := make([]curves.Scalar, n)
	for i := 0; i < n; i++ {
		si := verifier.curve.Scalar.One()
		for j, xj := range xs {
			// Pick x_j when the corresponding bit of i is set, else its inverse.
			if i>>(len(xs)-j-1)&0x01 == 1 {
				si = si.Mul(xj)
			} else {
				xjInverse, err := xj.Invert()
				if err != nil {
					return nil, errors.Wrap(err, "getss invert")
				}
				si = si.Mul(xjInverse)
			}
		}
		ss[i] = si
	}
	return ss, nil
}
// getsNew calculates the vector s of values used for verification
// It provides analogous functionality as gets(), but uses a O(n) algorithm vs O(nlogn)
// The algorithm inverts all xs, then begins multiplying the inversion by the square of x elements to
// calculate all s values thus minimizing necessary inversions/ computation.
func (verifier *InnerProductVerifier) getsNew(xs []curves.Scalar, n int) ([]curves.Scalar, error) {
	var err error
	ss := make([]curves.Scalar, n)
	// First element is all xs inverted mul'd
	// (one batch inversion instead of one inversion per x).
	ss[0] = verifier.curve.Scalar.One()
	for _, xj := range xs {
		ss[0] = ss[0].Mul(xj)
	}
	ss[0], err = ss[0].Invert()
	if err != nil {
		return nil, errors.Wrap(err, "ipp gets inv ss0")
	}
	// For round j, flipping the bit at position (len(xs)-j-1) of an index from
	// 0 to 1 swaps a factor x_j^-1 for x_j, i.e. multiplies by x_j^2. Each pass
	// fills the entries whose j-th bit is set from the already-computed entry
	// with that bit clear.
	for j, xj := range xs {
		xjSquared := xj.Square()
		for i := 0; i < n; i += 1 << (len(xs) - j) {
			ss[i+1<<(len(xs)-j-1)] = ss[i].Mul(xjSquared)
		}
	}
	return ss, nil
}

View File

@ -0,0 +1,79 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestIPPVerifyHappyPath proves and verifies over length-256 vectors end to end.
func TestIPPVerifyHappyPath(t *testing.T) {
	curve := curves.ED25519()
	vecLength := 256
	prover, err := NewInnerProductProver(vecLength, []byte("test"), *curve)
	require.NoError(t, err)
	a := randScalarVec(vecLength, *curve)
	b := randScalarVec(vecLength, *curve)
	u := curve.Point.Random(crand.Reader)
	// Prove with one transcript, verify with a fresh one in the same state.
	proof, err := prover.Prove(a, b, u, merlin.NewTranscript("test"))
	require.NoError(t, err)
	verifier, err := NewInnerProductVerifier(vecLength, []byte("test"), *curve)
	require.NoError(t, err)
	capP, err := prover.getP(a, b, u)
	require.NoError(t, err)
	verified, err := verifier.Verify(capP, u, proof, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, verified)
}
// BenchmarkIPPVerification benchmarks inner product proof verification for
// length-1024 vectors. The previous version never looped over bench.N (so it
// measured a single un-timed call) and discarded every setup error; setup is
// now checked and excluded from the timed region via ResetTimer.
func BenchmarkIPPVerification(bench *testing.B) {
	curve := curves.ED25519()
	vecLength := 1024
	prover, err := NewInnerProductProver(vecLength, []byte("test"), *curve)
	require.NoError(bench, err)
	a := randScalarVec(vecLength, *curve)
	b := randScalarVec(vecLength, *curve)
	u := curve.Point.Random(crand.Reader)
	transcriptProver := merlin.NewTranscript("test")
	proof, err := prover.Prove(a, b, u, transcriptProver)
	require.NoError(bench, err)
	verifier, err := NewInnerProductVerifier(vecLength, []byte("test"), *curve)
	require.NoError(bench, err)
	capP, err := prover.getP(a, b, u)
	require.NoError(bench, err)
	bench.ResetTimer()
	for i := 0; i < bench.N; i++ {
		// A fresh transcript per iteration mirrors real verification.
		transcriptVerifier := merlin.NewTranscript("test")
		verified, err := verifier.Verify(capP, u, proof, transcriptVerifier)
		require.NoError(bench, err)
		require.True(bench, verified)
	}
}
// TestIPPVerifyInvalidProof checks that a proof for different witnesses fails to verify.
func TestIPPVerifyInvalidProof(t *testing.T) {
	curve := curves.ED25519()
	vecLength := 64
	prover, err := NewInnerProductProver(vecLength, []byte("test"), *curve)
	require.NoError(t, err)
	a := randScalarVec(vecLength, *curve)
	b := randScalarVec(vecLength, *curve)
	u := curve.Point.Random(crand.Reader)
	// Build a proof over unrelated witnesses.
	aPrime := randScalarVec(64, *curve)
	bPrime := randScalarVec(64, *curve)
	uPrime := curve.Point.Random(crand.Reader)
	proofPrime, err := prover.Prove(aPrime, bPrime, uPrime, merlin.NewTranscript("test"))
	require.NoError(t, err)
	verifier, err := NewInnerProductVerifier(vecLength, []byte("test"), *curve)
	require.NoError(t, err)
	capP, err := prover.getP(a, b, u)
	require.NoError(t, err)
	// Check for different capP, u from proof
	verified, err := verifier.Verify(capP, u, proofPrime, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.False(t, verified)
}

View File

@ -0,0 +1,348 @@
package bulletproof
import (
crand "crypto/rand"
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/onsonr/hway/crypto/core/curves"
)
// BatchProve proves that a list of scalars v are in the range n.
// It implements the aggregating logarithmic proofs defined on pg21.
// Instead of taking a single value and a single blinding factor, BatchProve takes in a list of values and list of
// blinding factors.
//
// v are the secret values and gamma their matching blinding factors (same
// length, matched by index). n is the bit length of the range. proofGenerators
// supplies the independent generators g, h, u, and transcript drives the
// Fiat-Shamir challenges — the verifier must replay the same transcript label
// sequence to reproduce them.
func (prover *RangeProver) BatchProve(v, gamma []curves.Scalar, n int, proofGenerators RangeProofGenerators, transcript *merlin.Transcript) (*RangeProof, error) {
	// Define nm as the total bits required for secrets, calculated as number of secrets * n
	m := len(v)
	nm := n * m
	// nm must be less than or equal to the number of generators generated
	if nm > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than or equal to maxVectorLength")
	}
	// In case where nm is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:nm]
	proofH := prover.generators.H[0:nm]
	// Check that each elem in v is in range [0, 2^n]
	for _, vi := range v {
		checkedRange := checkRange(vi, n)
		if checkedRange != nil {
			return nil, checkedRange
		}
	}
	// L40 on pg19: aL is the concatenated bit decomposition of all secrets.
	aL, err := getaLBatched(v, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	onenm := get1nVector(nm, prover.curve)
	// L41 on pg19: aR = aL - 1^nm, so each entry is 0 or -1.
	aR, err := subtractPairwiseScalarVectors(aL, onenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	alpha := prover.curve.Scalar.Random(crand.Reader)
	// Calc A (L44, pg19): commitment to aL and aR under randomness alpha.
	halpha := proofGenerators.h.Mul(alpha)
	gaL := prover.curve.Point.SumOfProducts(proofG, aL)
	haR := prover.curve.Point.SumOfProducts(proofH, aR)
	capA := halpha.Add(gaL).Add(haR)
	// L45, 46, pg19: blinding vectors for the bit vectors.
	sL := getBlindingVector(nm, prover.curve)
	sR := getBlindingVector(nm, prover.curve)
	rho := prover.curve.Scalar.Random(crand.Reader)
	// Calc S (L47, pg19): commitment to sL and sR under randomness rho.
	hrho := proofGenerators.h.Mul(rho)
	gsL := prover.curve.Point.SumOfProducts(proofG, sL)
	hsR := prover.curve.Point.SumOfProducts(proofH, sR)
	capS := hrho.Add(gsL).Add(hsR)
	// Fiat Shamir for y,z (L49, pg19)
	capV := getcapVBatched(v, gamma, proofGenerators.g, proofGenerators.h)
	y, z, err := calcyzBatched(capV, capA, capS, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc t_1, t_2
	// See the l(X), r(X), equations on pg 21
	// Use l(X)'s and r(X)'s constant and linear terms to derive t_1 and t_2
	// (a_l - z*1^n)
	zonenm := multiplyScalarToScalarVector(z, onenm)
	constantTerml, err := subtractPairwiseScalarVectors(aL, zonenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTerml := sL
	// zSum term, see equation 71 on pg21
	zSum := getSumTermrXBatched(z, n, len(v), prover.curve)
	// a_r + z*1^nm
	aRPluszonenm, err := addPairwiseScalarVectors(aR, zonenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	ynm := getknVector(y, nm, prover.curve)
	hadamard, err := multiplyPairwiseScalarVectors(ynm, aRPluszonenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	constantTermr, err := addPairwiseScalarVectors(hadamard, zSum)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTermr, err := multiplyPairwiseScalarVectors(ynm, sR)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// t_1 (as the linear coefficient) is the sum of the dot products of l(X)'s linear term dot r(X)'s constant term
	// and r(X)'s linear term dot l(X)'s constant term
	t1FirstTerm, err := innerProduct(linearTerml, constantTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1SecondTerm, err := innerProduct(linearTermr, constantTerml)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1 := t1FirstTerm.Add(t1SecondTerm)
	// t_2 (as the quadratic coefficient) is the dot product of l(X)'s and r(X)'s linear terms
	t2, err := innerProduct(linearTerml, linearTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// L52, pg20: blinding factors for the T_1, T_2 commitments.
	tau1 := prover.curve.Scalar.Random(crand.Reader)
	tau2 := prover.curve.Scalar.Random(crand.Reader)
	// T_1, T_2 (L53, pg20)
	capT1 := proofGenerators.g.Mul(t1).Add(proofGenerators.h.Mul(tau1))
	capT2 := proofGenerators.g.Mul(t2).Add(proofGenerators.h.Mul(tau2))
	// Fiat shamir for x (L55, pg20)
	x, err := calcx(capT1, capT2, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc l
	// Instead of using the expression in the line, evaluate l() at x
	sLx := multiplyScalarToScalarVector(x, linearTerml)
	l, err := addPairwiseScalarVectors(constantTerml, sLx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc r
	// Instead of using the expression in the line, evaluate r() at x
	ynsRx := multiplyScalarToScalarVector(x, linearTermr)
	r, err := addPairwiseScalarVectors(constantTermr, ynsRx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc t hat
	// For efficiency, instead of calculating the dot product, evaluate t() at x
	// t0 = Sigma_i z^(2+i) * v[i] + delta(y,z), the constant coefficient of t(X).
	zm := getknVector(z, m, prover.curve)
	zsquarezm := multiplyScalarToScalarVector(z.Square(), zm)
	sumv := prover.curve.Scalar.Zero()
	for i := 0; i < m; i++ {
		elem := zsquarezm[i].Mul(v[i])
		sumv = sumv.Add(elem)
	}
	// NOTE: local deltayzBatched shadows the function of the same name below.
	deltayzBatched, err := deltayzBatched(y, z, n, m, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t0 := sumv.Add(deltayzBatched)
	tLinear := t1.Mul(x)
	tQuadratic := t2.Mul(x.Square())
	tHat := t0.Add(tLinear).Add(tQuadratic)
	// Calc tau_x (L61, pg20)
	tau2xsquare := tau2.Mul(x.Square())
	tau1x := tau1.Mul(x)
	// zsum accumulates Sigma_{j=1}^{m} z^(1+j) * gamma[j-1]; zExp starts at z
	// and is multiplied by z on each entry, so its first value used is z^2.
	zsum := prover.curve.Scalar.Zero()
	zExp := z.Clone()
	for j := 1; j < m+1; j++ {
		zExp = zExp.Mul(z)
		zsum = zsum.Add(zExp.Mul(gamma[j-1]))
	}
	taux := tau2xsquare.Add(tau1x).Add(zsum)
	// Calc mu (L62, pg20)
	mu := alpha.Add(rho.Mul(x))
	// Calc IPP (See section 4.2)
	hPrime, err := gethPrime(proofH, y, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// P is redefined in batched case, see bottom equation on pg21.
	capPhmu := getPhmuBatched(proofG, hPrime, proofGenerators.h, capA, capS, x, y, z, mu, n, m, prover.curve)
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := prover.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	ipp, err := prover.ippProver.rangeToIPP(proofG, hPrime, l, r, tHat, capPhmu, proofGenerators.u.Mul(w), transcript)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	out := &RangeProof{
		capA:  capA,
		capS:  capS,
		capT1: capT1,
		capT2: capT2,
		taux:  taux,
		mu:    mu,
		tHat:  tHat,
		ipp:   ipp,
		curve: &prover.curve,
	}
	return out, nil
}
// getSumTermrXBatched builds the final term of L71 on pg 21:
// Sigma_{j=1}^{m} z^{1+j} * (0^{(j-1)*n} || 2^{n} || 0^{(m-j)*n}),
// flattened into a single vector of length n*m where segment j holds
// z^{1+j} * 2^n.
func getSumTermrXBatched(z curves.Scalar, n, m int, curve curves.Curve) []curves.Scalar {
	twoN := get2nVector(n, curve)
	// Preallocate: the result is exactly m segments of length n, so growing
	// the slice via repeated append copies is avoidable.
	out := make([]curves.Scalar, 0, n*m)
	// zExp is multiplied by z before use, so segment j carries z^(1+j)
	// with the final power being z^(1+m).
	zExp := z.Clone()
	for j := 0; j < m; j++ {
		zExp = zExp.Mul(z)
		elem := multiplyScalarToScalarVector(zExp, twoN)
		out = append(out, elem...)
	}
	return out
}
// getcapVBatched produces one Pedersen commitment per secret: commitment i
// binds v[i] under blinding factor gamma[i] using generators g and h.
func getcapVBatched(v, gamma []curves.Scalar, g, h curves.Point) []curves.Point {
	commitments := make([]curves.Point, len(v))
	for i := range v {
		commitments[i] = getcapV(v[i], gamma[i], g, h)
	}
	return commitments
}
// getaLBatched concatenates the n-bit decompositions of each secret in v into
// a single bit vector of length n*len(v).
func getaLBatched(v []curves.Scalar, n int, curve curves.Curve) ([]curves.Scalar, error) {
	var bits []curves.Scalar
	for i := range v {
		chunk, err := getaL(v[i], n, curve)
		if err != nil {
			return nil, err
		}
		bits = append(bits, chunk...)
	}
	return bits, nil
}
// calcyzBatched derives the Fiat-Shamir challenges y and z by committing the
// value commitments capV plus capA and capS to the transcript, then extracting
// two wide scalars. Prover and verifier must perform identical appends in the
// same order to agree on the challenges.
func calcyzBatched(capV []curves.Point, capA, capS curves.Point, transcript *merlin.Transcript, curve curves.Curve) (curves.Scalar, curves.Scalar, error) {
	for _, commitment := range capV {
		transcript.AppendMessage([]byte("addV"), commitment.ToAffineUncompressed())
	}
	transcript.AppendMessage([]byte("addcapA"), capA.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapS"), capS.ToAffineUncompressed())
	// Each challenge is drawn as 64 bytes and reduced into a scalar.
	y, err := curve.NewScalar().SetBytesWide(transcript.ExtractBytes([]byte("gety"), 64))
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	z, err := curve.NewScalar().SetBytesWide(transcript.ExtractBytes([]byte("getz"), 64))
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	return y, z, nil
}
// deltayzBatched computes the batched delta(y, z) term used in t0:
// (z - z^2)*<1^nm, y^nm> - Sigma_{j=1}^{m} z^{j+2} * <1^n, 2^n>,
// the aggregated-proof redefinition of delta given on pg21.
func deltayzBatched(y, z curves.Scalar, n, m int, curve curves.Curve) (curves.Scalar, error) {
	// z - z^2
	zMinuszsquare := z.Sub(z.Square())
	// 1^(n*m)
	onenm := get1nVector(n*m, curve)
	// <1^nm, y^nm>
	onenmdotynm, err := innerProduct(onenm, getknVector(y, n*m, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// (z - z^2)*<1^n, y^n>
	termFirst := zMinuszsquare.Mul(onenmdotynm)
	// <1^n, 2^n>
	onendottwon, err := innerProduct(get1nVector(n, curve), get2nVector(n, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// zExp starts at z^2 and is multiplied by z on loop entry, so the sum
	// accumulates z^(j+2) * <1^n, 2^n> for j = 1..m.
	termSecond := curve.Scalar.Zero()
	zExp := z.Square()
	for j := 1; j < m+1; j++ {
		zExp = zExp.Mul(z)
		elem := zExp.Mul(onendottwon)
		termSecond = termSecond.Add(elem)
	}
	// (z - z^2)*<1^n, y^n> - z^3*<1^n, 2^n>
	out := termFirst.Sub(termSecond)
	return out, nil
}
// getPhmuBatched computes P * h^(-mu), the combined commitment handed to the
// inner product argument in the batched case.
// Bottom equation on pg21.
func getPhmuBatched(proofG, proofHPrime []curves.Point, h, capA, capS curves.Point, x, y, z, mu curves.Scalar, n, m int, curve curves.Curve) curves.Point {
	twoN := get2nVector(n, curve)
	// h'^(z*y^n + z^2*2^n)
	lastElem := curve.NewIdentityPoint()
	zExp := z.Clone()
	for j := 1; j < m+1; j++ {
		// Get subvector of h
		hSubvector := proofHPrime[(j-1)*n : j*n]
		// z^(j+1)
		zExp = zExp.Mul(z)
		exp := multiplyScalarToScalarVector(zExp, twoN)
		// Final elem
		elem := curve.Point.SumOfProducts(hSubvector, exp)
		lastElem = lastElem.Add(elem)
	}
	zynm := multiplyScalarToScalarVector(z, getknVector(y, n*m, curve))
	hPrimezynm := curve.Point.SumOfProducts(proofHPrime, zynm)
	lastElem = lastElem.Add(hPrimezynm)
	// S^x
	capSx := capS.Mul(x)
	// g^-z --> -z*<1,g>
	onenm := get1nVector(n*m, curve)
	zNeg := z.Neg()
	// NOTE(review): despite the "inv" in the name, zinvonen holds -z * 1^nm
	// (negation, not a modular inverse).
	zinvonen := multiplyScalarToScalarVector(zNeg, onenm)
	zgdotonen := curve.Point.SumOfProducts(proofG, zinvonen)
	// L66 on pg20
	P := capA.Add(capSx).Add(zgdotonen).Add(lastElem)
	hmu := h.Mul(mu)
	Phmu := P.Sub(hmu)
	return Phmu
}

View File

@ -0,0 +1,102 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestRangeBatchProverHappyPath batch-proves four random 256-bit secrets and
// checks the resulting inner product proof has log2(256*4) = 10 rounds.
func TestRangeBatchProverHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	// Four random secrets with matching blinding factors.
	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.Random(crand.Reader)
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}
	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}

	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.NotNil(t, proof)
	require.Equal(t, 10, len(proof.ipp.capLs))
	require.Equal(t, 10, len(proof.ipp.capRs))
}
// TestGetaLBatched checks that each 256-bit chunk of the batched bit vector
// recombines with the powers-of-two vector back to its original secret.
func TestGetaLBatched(t *testing.T) {
	curve := curves.ED25519()
	v := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.Random(crand.Reader)
	}
	aL, err := getaLBatched(v, 256, *curve)
	require.NoError(t, err)
	twoN := get2nVector(256, *curve)
	for i, vi := range v {
		chunk := aL[i*256 : (i+1)*256]
		recombined, err := innerProduct(chunk, twoN)
		require.NoError(t, err)
		require.Zero(t, recombined.Cmp(vi))
	}
}
// TestRangeBatchProverMarshal round-trips a batched range proof through
// MarshalBinary/UnmarshalBinary and verifies every field survives.
func TestRangeBatchProverMarshal(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.Random(crand.Reader)
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}
	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	// Decode into a fresh proof and compare field by field.
	decoded := NewRangeProof(curve)
	err = decoded.UnmarshalBinary(proof.MarshalBinary())
	require.NoError(t, err)
	require.True(t, proof.capA.Equal(decoded.capA))
	require.True(t, proof.capS.Equal(decoded.capS))
	require.True(t, proof.capT1.Equal(decoded.capT1))
	require.True(t, proof.capT2.Equal(decoded.capT2))
	require.Zero(t, proof.taux.Cmp(decoded.taux))
	require.Zero(t, proof.mu.Cmp(decoded.mu))
	require.Zero(t, proof.tHat.Cmp(decoded.tHat))
}

View File

@ -0,0 +1,91 @@
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/onsonr/hway/crypto/core/curves"
)
// VerifyBatched verifies a given batched range proof.
// It takes in a list of commitments to the secret values as capV instead of a single commitment to a single point
// when compared to the unbatched single range proof case.
//
// proof is the batched proof to check, capV the per-secret Pedersen
// commitments, proofGenerators the generators shared with the prover, n the
// range bit length, and transcript a fresh merlin transcript whose label
// sequence mirrors the prover's. Returns whether the proof verified.
func (verifier *RangeVerifier) VerifyBatched(proof *RangeProof, capV []curves.Point, proofGenerators RangeProofGenerators, n int, transcript *merlin.Transcript) (bool, error) {
	// Define nm as the total bits required for secrets, calculated as number of secrets * n
	m := len(capV)
	nm := n * m
	// nm must be less than the number of generators generated
	if nm > len(verifier.generators.G) {
		return false, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := verifier.generators.G[0:nm]
	proofH := verifier.generators.H[0:nm]
	// Calc y,z,x from Fiat Shamir heuristic
	y, z, err := calcyzBatched(capV, proof.capA, proof.capS, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	x, err := calcx(proof.capT1, proof.capT2, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := verifier.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		// BUGFIX: this was previously wrapped as "rangeproof prove", which
		// misattributed verification failures to the proving path.
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Calc delta(y,z), redefined for batched case on pg21
	// (renamed from deltayzBatched to avoid shadowing the function it calls).
	deltaYZ, err := deltayzBatched(y, z, n, m, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Check tHat: L65, pg20
	// See equation 72 on pg21
	tHatIsValid := verifier.checktHatBatched(proof, capV, proofGenerators.g, proofGenerators.h, deltaYZ, x, z, m)
	if !tHatIsValid {
		return false, errors.New("rangeproof verify tHat is invalid")
	}
	// Verify IPP
	hPrime, err := gethPrime(proofH, y, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	capPhmu := getPhmuBatched(proofG, hPrime, proofGenerators.h, proof.capA, proof.capS, x, y, z, proof.mu, n, m, verifier.curve)
	ippVerified, err := verifier.ippVerifier.VerifyFromRangeProof(proofG, hPrime, capPhmu, proofGenerators.u.Mul(w), proof.tHat, proof.ipp, transcript)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	return ippVerified, nil
}
// checktHatBatched performs the tHat consistency check of L65, pg20, in its
// batched form: g^tHat * h^taux must equal
// g^delta(y,z) * V^(z^2 * z^m) * T1^x * T2^(x^2).
func (verifier *RangeVerifier) checktHatBatched(proof *RangeProof, capV []curves.Point, g, h curves.Point, deltayz, x, z curves.Scalar, m int) bool {
	// Left side: g^tHat * h^tau_x.
	lhs := g.Mul(proof.tHat).Add(h.Mul(proof.taux))

	// Right side: commitments to the secrets weighted by z^2 * z^i, plus the
	// delta term and the two T commitments evaluated at x.
	zPowers := getknVector(z, m, verifier.curve)
	weights := multiplyScalarToScalarVector(z.Square(), zPowers)
	weightedV := verifier.curve.Point.SumOfProducts(capV, weights)
	rhs := weightedV.
		Add(g.Mul(deltayz)).
		Add(proof.capT1.Mul(x)).
		Add(proof.capT2.Mul(x.Square()))

	return lhs.Equal(rhs)
}

View File

@ -0,0 +1,148 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestRangeBatchVerifyHappyPath proves four random secrets and expects
// verification against their commitments to succeed.
func TestRangeBatchVerifyHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.Random(crand.Reader)
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	capV := getcapVBatched(v, gamma, g, h)
	verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, verified)
}
// TestRangeBatchVerifyNotInRange expects BatchProve to fail when n is too
// small for the random secrets to fit in the range.
func TestRangeBatchVerifyNotInRange(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	// One in-range value; the random ones overflow a 2-bit range.
	v := []curves.Scalar{
		curve.Scalar.One(),
		curve.Scalar.Random(crand.Reader),
		curve.Scalar.Random(crand.Reader),
		curve.Scalar.Random(crand.Reader),
	}
	gamma := make([]curves.Scalar, 4)
	for i := range gamma {
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}
	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	_, err = prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.Error(t, err)
}
// TestRangeBatchVerifyNonRandom proves four fixed in-range values (all ones)
// and expects verification to succeed.
func TestRangeBatchVerifyNonRandom(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.One()
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	capV := getcapVBatched(v, gamma, g, h)
	verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, verified)
}
// TestRangeBatchVerifyInvalid corrupts one of the value commitments and
// expects verification to fail.
func TestRangeBatchVerifyInvalid(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.One()
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	// Replace the first commitment with an unrelated point; the proof no
	// longer matches the committed values.
	capV := getcapVBatched(v, gamma, g, h)
	capV[0] = curve.Point.Random(crand.Reader)
	verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, merlin.NewTranscript("test"))
	require.Error(t, err)
	require.False(t, verified)
}

View File

@ -0,0 +1,476 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package bulletproof implements the zero knowledge protocol bulletproofs as defined in https://eprint.iacr.org/2017/1066.pdf
package bulletproof
import (
crand "crypto/rand"
"math/big"
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/onsonr/hway/crypto/core/curves"
)
// RangeProver is the struct used to create RangeProofs
// It specifies which curve to use and holds precomputed generators
// See NewRangeProver() for prover initialization.
type RangeProver struct {
	curve      curves.Curve        // curve over which proofs are constructed
	generators *ippGenerators      // precomputed generator vectors G, H
	ippProver  *InnerProductProver // inner product prover used to compact l, r
}
// RangeProof is the struct used to hold a range proof
// capA is a commitment to a_L and a_R using randomness alpha
// capS is a commitment to s_L and s_R using randomness rho
// capTau1,2 are commitments to t1,t2 respectively using randomness tau_1,2
// tHat represents t(X) as defined on page 19
// taux is the blinding factor for tHat
// ipp is the inner product proof used for compacting the transfer of l,r (See 4.2 on pg20).
type RangeProof struct {
	capA, capS, capT1, capT2 curves.Point
	taux, mu, tHat           curves.Scalar
	ipp                      *InnerProductProof
	curve                    *curves.Curve // curve the proof was built over; used by UnmarshalBinary
}
// RangeProofGenerators holds the three independent group generators g, h, u
// used for the commitments within a range proof.
type RangeProofGenerators struct {
	g, h, u curves.Point
}
// NewRangeProver builds a prover over the given curve, precomputing generator
// points (derived from rangeDomain) and an inner product prover (derived from
// ippDomain) sized for vectors of at most maxVectorLength.
func NewRangeProver(maxVectorLength int, rangeDomain, ippDomain []byte, curve curves.Curve) (*RangeProver, error) {
	gens, err := getGeneratorPoints(maxVectorLength, rangeDomain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "range NewRangeProver")
	}
	ipp, err := NewInnerProductProver(maxVectorLength, ippDomain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "range NewRangeProver")
	}
	return &RangeProver{
		curve:      curve,
		generators: gens,
		ippProver:  ipp,
	}, nil
}
// NewRangeProof allocates an empty RangeProof bound to the given curve.
// Pair it with UnmarshalBinary() to deserialize a marshaled proof; all point
// and scalar fields start as their nil zero values.
func NewRangeProof(curve *curves.Curve) *RangeProof {
	return &RangeProof{
		ipp:   NewInnerProductProof(curve),
		curve: curve,
	}
}
// Prove uses the range prover to prove that some value v is within the range [0, 2^n - 1]
// It implements the protocol defined on pgs 19,20 in https://eprint.iacr.org/2017/1066.pdf
// v is the value of which to prove the range
// n is the power that specifies the upper bound of the range, ie. 2^n
// gamma is a scalar used for as a blinding factor
// g, h, u are unique points used as generators for the blinding factor
// transcript is a merlin transcript to be used for the fiat shamir heuristic.
func (prover *RangeProver) Prove(v, gamma curves.Scalar, n int, proofGenerators RangeProofGenerators, transcript *merlin.Transcript) (*RangeProof, error) {
	// n must be less than or equal to the number of generators generated
	if n > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than or equal to maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:n]
	proofH := prover.generators.H[0:n]
	// Check that v is in range [0, 2^n - 1]
	if bigZero := big.NewInt(0); v.BigInt().Cmp(bigZero) == -1 {
		return nil, errors.New("v is less than 0")
	}
	bigTwo := big.NewInt(2)
	if n < 0 {
		return nil, errors.New("n cannot be less than 0")
	}
	bigN := big.NewInt(int64(n))
	var bigTwoToN big.Int
	bigTwoToN.Exp(bigTwo, bigN, nil)
	// BUGFIX: reject v >= 2^n. The previous comparison (== 1) only rejected
	// v > 2^n, so v == 2^n slipped through even though it does not fit in n
	// bits; its bit decomposition below would silently drop the top bit and
	// produce an unverifiable proof.
	if v.BigInt().Cmp(&bigTwoToN) != -1 {
		return nil, errors.New("v is greater than 2^n")
	}
	// L40 on pg19: bit decomposition of v.
	aL, err := getaL(v, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	onen := get1nVector(n, prover.curve)
	// L41 on pg19: aR = aL - 1^n.
	aR, err := subtractPairwiseScalarVectors(aL, onen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	alpha := prover.curve.Scalar.Random(crand.Reader)
	// Calc A (L44, pg19)
	halpha := proofGenerators.h.Mul(alpha)
	gaL := prover.curve.Point.SumOfProducts(proofG, aL)
	haR := prover.curve.Point.SumOfProducts(proofH, aR)
	capA := halpha.Add(gaL).Add(haR)
	// L45, 46, pg19
	sL := getBlindingVector(n, prover.curve)
	sR := getBlindingVector(n, prover.curve)
	rho := prover.curve.Scalar.Random(crand.Reader)
	// Calc S (L47, pg19)
	hrho := proofGenerators.h.Mul(rho)
	gsL := prover.curve.Point.SumOfProducts(proofG, sL)
	hsR := prover.curve.Point.SumOfProducts(proofH, sR)
	capS := hrho.Add(gsL).Add(hsR)
	// Fiat Shamir for y,z (L49, pg19)
	capV := getcapV(v, gamma, proofGenerators.g, proofGenerators.h)
	y, z, err := calcyz(capV, capA, capS, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc t_1, t_2
	// See the l(X), r(X), t(X) equations on pg 19
	// Use l(X)'s and r(X)'s constant and linear terms to derive t_1 and t_2
	// (a_l - z*1^n)
	zonen := multiplyScalarToScalarVector(z, onen)
	constantTerml, err := subtractPairwiseScalarVectors(aL, zonen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTerml := sL
	// z^2 * 2^N
	twoN := get2nVector(n, prover.curve)
	zSquareTwon := multiplyScalarToScalarVector(z.Square(), twoN)
	// a_r + z*1^n
	aRPluszonen, err := addPairwiseScalarVectors(aR, zonen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	yn := getknVector(y, n, prover.curve)
	hadamard, err := multiplyPairwiseScalarVectors(yn, aRPluszonen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	constantTermr, err := addPairwiseScalarVectors(hadamard, zSquareTwon)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTermr, err := multiplyPairwiseScalarVectors(yn, sR)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// t_1 (as the linear coefficient) is the sum of the dot products of l(X)'s linear term dot r(X)'s constant term
	// and r(X)'s linear term dot l(X)'s constant term
	t1FirstTerm, err := innerProduct(linearTerml, constantTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1SecondTerm, err := innerProduct(linearTermr, constantTerml)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1 := t1FirstTerm.Add(t1SecondTerm)
	// t_2 (as the quadratic coefficient) is the dot product of l(X)'s and r(X)'s linear terms
	t2, err := innerProduct(linearTerml, linearTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// L52, pg20
	tau1 := prover.curve.Scalar.Random(crand.Reader)
	tau2 := prover.curve.Scalar.Random(crand.Reader)
	// T_1, T_2 (L53, pg20)
	capT1 := proofGenerators.g.Mul(t1).Add(proofGenerators.h.Mul(tau1))
	capT2 := proofGenerators.g.Mul(t2).Add(proofGenerators.h.Mul(tau2))
	// Fiat shamir for x (L55, pg20)
	x, err := calcx(capT1, capT2, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc l (L58, pg20)
	// Instead of using the expression in the line, evaluate l() at x
	sLx := multiplyScalarToScalarVector(x, linearTerml)
	l, err := addPairwiseScalarVectors(constantTerml, sLx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc r (L59, pg20)
	// Instead of using the expression in the line, evaluate r() at x
	ynsRx := multiplyScalarToScalarVector(x, linearTermr)
	r, err := addPairwiseScalarVectors(constantTermr, ynsRx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc t hat (L60, pg20)
	// For efficiency, instead of calculating the dot product, evaluate t() at x
	deltayz, err := deltayz(y, z, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t0 := v.Mul(z.Square()).Add(deltayz)
	tLinear := t1.Mul(x)
	tQuadratic := t2.Mul(x.Square())
	tHat := t0.Add(tLinear).Add(tQuadratic)
	// Calc tau_x (L61, pg20)
	tau2xsquare := tau2.Mul(x.Square())
	tau1x := tau1.Mul(x)
	zsquaregamma := z.Square().Mul(gamma)
	taux := tau2xsquare.Add(tau1x).Add(zsquaregamma)
	// Calc mu (L62, pg20)
	mu := alpha.Add(rho.Mul(x))
	// Calc IPP (See section 4.2)
	hPrime, err := gethPrime(proofH, y, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	capPhmu, err := getPhmu(proofG, hPrime, proofGenerators.h, capA, capS, x, y, z, mu, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := prover.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	ipp, err := prover.ippProver.rangeToIPP(proofG, hPrime, l, r, tHat, capPhmu, proofGenerators.u.Mul(w), transcript)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	out := &RangeProof{
		capA:  capA,
		capS:  capS,
		capT1: capT1,
		capT2: capT2,
		taux:  taux,
		mu:    mu,
		tHat:  tHat,
		ipp:   ipp,
		curve: &prover.curve,
	}
	return out, nil
}
// MarshalBinary serializes the proof as the concatenation of its four
// compressed points, three scalars, and the embedded inner product proof,
// in that fixed order (mirrored by UnmarshalBinary).
func (proof *RangeProof) MarshalBinary() []byte {
	var buf []byte
	for _, point := range []curves.Point{proof.capA, proof.capS, proof.capT1, proof.capT2} {
		buf = append(buf, point.ToAffineCompressed()...)
	}
	for _, scalar := range []curves.Scalar{proof.taux, proof.mu, proof.tHat} {
		buf = append(buf, scalar.Bytes()...)
	}
	return append(buf, proof.ipp.MarshalBinary()...)
}
// UnmarshalBinary takes bytes of a marshaled proof and writes them into a range proof
// The range proof used should be from the output of NewRangeProof().
// Layout (matching MarshalBinary): capA, capS, capT1, capT2 as compressed
// points, then taux, mu, tHat as scalars, then the inner product proof.
func (proof *RangeProof) UnmarshalBinary(data []byte) error {
	scalarLen := len(proof.curve.NewScalar().Bytes())
	pointLen := len(proof.curve.NewGeneratorPoint().ToAffineCompressed())
	// BUGFIX: validate the input length up front. The fixed-width slicing
	// below would panic on truncated input instead of returning an error.
	if len(data) < 4*pointLen+3*scalarLen {
		return errors.New("rangeProof UnmarshalBinary: data too short")
	}
	ptr := 0
	// Get points
	capA, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capA = capA
	ptr += pointLen
	capS, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capS = capS
	ptr += pointLen
	capT1, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capT1 = capT1
	ptr += pointLen
	capT2, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capT2 = capT2
	ptr += pointLen
	// Get scalars
	taux, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary SetBytes")
	}
	proof.taux = taux
	ptr += scalarLen
	mu, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary SetBytes")
	}
	proof.mu = mu
	ptr += scalarLen
	tHat, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary SetBytes")
	}
	proof.tHat = tHat
	ptr += scalarLen
	// Get IPP (performs its own length handling on the remainder)
	err = proof.ipp.UnmarshalBinary(data[ptr:])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary")
	}
	return nil
}
// checkRange validates whether some scalar v is within the range [0, 2^n - 1]
// It will return an error if v is less than 0 or greater than 2^n - 1
// Otherwise it will return nil.
func checkRange(v curves.Scalar, n int) error {
	if n < 0 {
		return errors.New("n cannot be less than 0")
	}
	var bigTwoToN big.Int
	bigTwoToN.Lsh(big.NewInt(1), uint(n))
	// v must be strictly less than 2^n. The previous comparison (== 1)
	// wrongly accepted v == 2^n, contradicting the documented range.
	if v.BigInt().Cmp(&bigTwoToN) >= 0 {
		return errors.New("v is greater than 2^n")
	}
	return nil
}
// getBlindingVector returns a vector of scalars used as blinding factors for commitments.
func getBlindingVector(length int, curve curves.Curve) []curves.Scalar {
	blinding := make([]curves.Scalar, length)
	for i := range blinding {
		blinding[i] = curve.Scalar.Random(crand.Reader)
	}
	return blinding
}
// getcapV returns a commitment to v using blinding factor gamma: h^gamma * g^v.
func getcapV(v, gamma curves.Scalar, g, h curves.Point) curves.Point {
	blind := h.Mul(gamma)
	value := g.Mul(v)
	return blind.Add(value)
}
// getaL obtains the bit vector representation of v
// See the a_L definition towards the bottom of pg 17 of https://eprint.iacr.org/2017/1066.pdf
// Entry i of the result is bit i of v (little-endian bit order over v.Bytes()).
func getaL(v curves.Scalar, n int, curve curves.Curve) ([]curves.Scalar, error) {
	vBytes := v.Bytes()
	// Every bit we extract must exist in v's byte representation;
	// without this guard the indexing below panics for oversized n.
	if n > len(vBytes)*8 {
		return nil, errors.New("getaL n exceeds scalar bit length")
	}
	zero := curve.Scalar.Zero()
	one := curve.Scalar.One()
	aL := make([]curves.Scalar, n)
	for i := 0; i < n; i++ {
		// Extract bit i, then select zero/one in constant time.
		ithBit := vBytes[i>>3] >> (i & 0x07) & 0x01
		bit, err := cmoveScalar(zero, one, int(ithBit), curve)
		if err != nil {
			return nil, errors.Wrap(err, "getaL")
		}
		aL[i] = bit
	}
	return aL, nil
}
// cmoveScalar provides a constant time operation that returns x if which is 0 and returns y if which is 1.
func cmoveScalar(x, y curves.Scalar, which int, curve curves.Curve) (curves.Scalar, error) {
	if which != 0 && which != 1 {
		return nil, errors.New("cmoveScalar which must be 0 or 1")
	}
	// mask is 0x00 when which == 0 and 0xFF when which == 1.
	mask := -byte(which)
	selected := x.Bytes()
	yBytes := y.Bytes()
	for i := range selected {
		// XOR-select: unchanged under a zero mask, replaced by y's byte
		// under a full mask — no data-dependent branch.
		selected[i] ^= (selected[i] ^ yBytes[i]) & mask
	}
	out, err := curve.NewScalar().SetBytes(selected)
	if err != nil {
		return nil, errors.Wrap(err, "cmoveScalar SetBytes")
	}
	return out, nil
}
// calcyz uses a merlin transcript for Fiat Shamir
// It takes the current state of the transcript and appends the newly calculated capA and capS values
// Two new scalars are then read from the transcript
// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf
func calcyz(capV, capA, capS curves.Point, transcript *merlin.Transcript, curve curves.Curve) (curves.Scalar, curves.Scalar, error) {
	// Commit V, A, S to the transcript so the challenges bind to them
	transcript.AppendMessage([]byte("addV"), capV.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapA"), capA.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapS"), capS.ToAffineUncompressed())
	// Squeeze two 64-byte challenges and reduce each into a scalar
	y, err := curve.NewScalar().SetBytesWide(transcript.ExtractBytes([]byte("gety"), 64))
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	z, err := curve.NewScalar().SetBytesWide(transcript.ExtractBytes([]byte("getz"), 64))
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	return y, z, nil
}
// calcx uses a merlin transcript for Fiat Shamir
// It takes the current state of the transcript and appends the newly calculated capT1 and capT2 values
// A new scalar is then read from the transcript
// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf
func calcx(capT1, capT2 curves.Point, transcript *merlin.Transcript, curve curves.Curve) (curves.Scalar, error) {
	// Bind the challenge to both T commitments
	transcript.AppendMessage([]byte("addcapT1"), capT1.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapT2"), capT2.ToAffineUncompressed())
	// Squeeze a 64-byte challenge and reduce it into a scalar
	challenge := transcript.ExtractBytes([]byte("getx"), 64)
	x, err := curve.NewScalar().SetBytesWide(challenge)
	if err != nil {
		return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide")
	}
	return x, nil
}

View File

@ -0,0 +1,86 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestRangeProverHappyPath proves a random value with n = 256 and checks
// that the inner-product proof carries log2(256) = 8 rounds of L/R points.
func TestRangeProverHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	u := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: u,
	}
	transcript := merlin.NewTranscript("test")
	proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript)
	require.NoError(t, err)
	require.NotNil(t, proof)
	require.Equal(t, 8, len(proof.ipp.capLs))
	require.Equal(t, 8, len(proof.ipp.capRs))
}
// TestGetaL verifies the bit decomposition: <a_L, 2^n> must reconstruct v.
func TestGetaL(t *testing.T) {
	curve := curves.ED25519()
	v := curve.Scalar.Random(crand.Reader)
	aL, err := getaL(v, 256, *curve)
	require.NoError(t, err)
	twoN := get2nVector(256, *curve)
	product, err := innerProduct(aL, twoN)
	require.NoError(t, err)
	require.Zero(t, product.Cmp(v))
}
// TestCmove checks that cmoveScalar selects the second argument when which == 1.
func TestCmove(t *testing.T) {
	curve := curves.ED25519()
	two := curve.Scalar.One().Double()
	four := two.Double()
	out, err := cmoveScalar(two, four, 1, *curve)
	require.NoError(t, err)
	require.Zero(t, out.Cmp(four))
}
// TestRangeProverMarshal round-trips a proof through MarshalBinary /
// UnmarshalBinary and checks every field survives intact.
func TestRangeProverMarshal(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	u := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: u,
	}
	transcript := merlin.NewTranscript("test")
	proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript)
	require.NoError(t, err)
	proofMarshaled := proof.MarshalBinary()
	proofPrime := NewRangeProof(curve)
	err = proofPrime.UnmarshalBinary(proofMarshaled)
	require.NoError(t, err)
	require.True(t, proof.capA.Equal(proofPrime.capA))
	require.True(t, proof.capS.Equal(proofPrime.capS))
	require.True(t, proof.capT1.Equal(proofPrime.capT1))
	require.True(t, proof.capT2.Equal(proofPrime.capT2))
	require.Zero(t, proof.taux.Cmp(proofPrime.taux))
	require.Zero(t, proof.mu.Cmp(proofPrime.mu))
	require.Zero(t, proof.tHat.Cmp(proofPrime.tHat))
}

View File

@ -0,0 +1,187 @@
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/onsonr/hway/crypto/core/curves"
)
// RangeVerifier is the struct used to verify RangeProofs
// It specifies which curve to use and holds precomputed generators
// See NewRangeVerifier() for verifier initialization.
type RangeVerifier struct {
	curve       curves.Curve          // curve all proof elements must live on
	generators  *ippGenerators        // precomputed G/H generator vectors for the range domain
	ippVerifier *InnerProductVerifier // verifier for the embedded inner-product argument
}
// NewRangeVerifier initializes a new verifier
// It uses the specified domain to generate generators for vectors of at most maxVectorLength
// A verifier can be used to verify range proofs for vectors of length less than or equal to maxVectorLength
// A verifier is defined by an explicit curve.
func NewRangeVerifier(maxVectorLength int, rangeDomain, ippDomain []byte, curve curves.Curve) (*RangeVerifier, error) {
	generators, err := getGeneratorPoints(maxVectorLength, rangeDomain, curve)
	if err != nil {
		// Wrap with this constructor's own name (was mislabeled "NewRangeProver").
		return nil, errors.Wrap(err, "range NewRangeVerifier")
	}
	ippVerifier, err := NewInnerProductVerifier(maxVectorLength, ippDomain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "range NewRangeVerifier")
	}
	return &RangeVerifier{curve: curve, generators: generators, ippVerifier: ippVerifier}, nil
}
// Verify verifies the given range proof inputs
// It implements the checking of L65 on pg 20
// It also verifies the dot product of <l,r> using the inner product proof
// capV is a commitment to v using blinding factor gamma
// n is the power that specifies the upper bound of the range, ie. 2^n
// g, h, u are unique points used as generators for the blinding factor
// transcript is a merlin transcript to be used for the fiat shamir heuristic.
func (verifier *RangeVerifier) Verify(proof *RangeProof, capV curves.Point, proofGenerators RangeProofGenerators, n int, transcript *merlin.Transcript) (bool, error) {
	// Length of vectors must be less than the number of generators generated
	if n > len(verifier.generators.G) {
		return false, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := verifier.generators.G[0:n]
	proofH := verifier.generators.H[0:n]
	// Calc y,z,x from Fiat Shamir heuristic; these must replay the prover's
	// transcript operations exactly to derive the same challenges.
	y, z, err := calcyz(capV, proof.capA, proof.capS, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	x, err := calcx(proof.capT1, proof.capT2, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := verifier.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		// Wrap message corrected: this is the verify path (was "rangeproof prove").
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Calc delta(y,z)
	deltayz, err := deltayz(y, z, n, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Check tHat: L65, pg20
	tHatIsValid := verifier.checktHat(proof, capV, proofGenerators.g, proofGenerators.h, deltayz, x, z)
	if !tHatIsValid {
		return false, errors.New("rangeproof verify tHat is invalid")
	}
	// Verify IPP against the h' generators and the de-blinded commitment P/h^mu
	hPrime, err := gethPrime(proofH, y, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	capPhmu, err := getPhmu(proofG, hPrime, proofGenerators.h, proof.capA, proof.capS, x, y, z, proof.mu, n, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	ippVerified, err := verifier.ippVerifier.VerifyFromRangeProof(proofG, hPrime, capPhmu, proofGenerators.u.Mul(w), proof.tHat, proof.ipp, transcript)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	return ippVerified, nil
}
// checktHat performs the L65 (pg20) check:
// g^tHat * h^taux == V^(z^2) * g^delta(y,z) * T1^x * T2^(x^2).
func (*RangeVerifier) checktHat(proof *RangeProof, capV, g, h curves.Point, deltayz, x, z curves.Scalar) bool {
	// Left side: g^tHat * h^taux
	lhs := g.Mul(proof.tHat).Add(h.Mul(proof.taux))
	// Right side, accumulated term by term
	rhs := capV.Mul(z.Square())
	rhs = rhs.Add(g.Mul(deltayz))
	rhs = rhs.Add(proof.capT1.Mul(x))
	rhs = rhs.Add(proof.capT2.Mul(x.Square()))
	return lhs.Equal(rhs)
}
// gethPrime calculates new h prime generators as defined in L64 on pg20:
// hPrime[i] = h[i]^(y^-i).
func gethPrime(h []curves.Point, y curves.Scalar, curve curves.Curve) ([]curves.Point, error) {
	yInv, err := y.Invert()
	if err != nil {
		// Check the inversion error before using yInv; the original called
		// getknVector(yInv, ...) first, risking use of an invalid scalar.
		return nil, errors.Wrap(err, "gethPrime")
	}
	yInvn := getknVector(yInv, len(h), curve)
	hPrime := make([]curves.Point, len(h))
	for i, hElem := range h {
		hPrime[i] = hElem.Mul(yInvn[i])
	}
	return hPrime, nil
}
// Obtain P used for IPP verification
// See L67 on pg20
// Note P on L66 includes blinding factor hmu, this method removes that factor.
// proofG/proofHPrime are the trimmed generator vectors, h the blinding base,
// capA/capS the prover's commitments, and x, y, z the Fiat-Shamir challenges.
func getPhmu(proofG, proofHPrime []curves.Point, h, capA, capS curves.Point, x, y, z, mu curves.Scalar, n int, curve curves.Curve) (curves.Point, error) {
	// h'^(z*y^n + z^2*2^n)
	zyn := multiplyScalarToScalarVector(z, getknVector(y, n, curve))
	zsquaretwon := multiplyScalarToScalarVector(z.Square(), get2nVector(n, curve))
	elemLastExponent, err := addPairwiseScalarVectors(zyn, zsquaretwon)
	if err != nil {
		return nil, errors.Wrap(err, "getPhmu")
	}
	lastElem := curve.Point.SumOfProducts(proofHPrime, elemLastExponent)
	// S^x
	capSx := capS.Mul(x)
	// g^-z --> -z*<1,g>
	onen := get1nVector(n, curve)
	zNeg := z.Neg()
	zinvonen := multiplyScalarToScalarVector(zNeg, onen)
	zgdotonen := curve.Point.SumOfProducts(proofG, zinvonen)
	// L66 on pg20: P = A * S^x * g^-z * h'^(z*y^n + z^2*2^n)
	P := capA.Add(capSx).Add(zgdotonen).Add(lastElem)
	// Strip the blinding term h^mu so the result matches the IPP statement.
	hmu := h.Mul(mu)
	Phmu := P.Sub(hmu)
	return Phmu, nil
}
// deltayz evaluates the delta(y,z) polynomial from (39) on pg18:
// (z - z^2)*<1^n, y^n> - z^3*<1^n, 2^n>.
func deltayz(y, z curves.Scalar, n int, curve curves.Curve) (curves.Scalar, error) {
	ones := get1nVector(n, curve)
	// <1^n, y^n>
	onesDotYn, err := innerProduct(ones, getknVector(y, n, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// <1^n, 2^n>
	onesDotTwon, err := innerProduct(ones, get2nVector(n, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// (z - z^2)*<1^n, y^n>
	left := z.Sub(z.Square()).Mul(onesDotYn)
	// z^3*<1^n, 2^n>
	right := z.Cube().Mul(onesDotTwon)
	return left.Sub(right), nil
}

View File

@ -0,0 +1,87 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core/curves"
)
// TestRangeVerifyHappyPath proves and verifies a random 256-bit value
// end-to-end with independent prover and verifier transcripts.
func TestRangeVerifyHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	u := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: u,
	}
	transcript := merlin.NewTranscript("test")
	proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript)
	require.NoError(t, err)
	verifier, err := NewRangeVerifier(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	transcriptVerifier := merlin.NewTranscript("test")
	capV := getcapV(v, gamma, g, h)
	verified, err := verifier.Verify(proof, capV, proofGenerators, n, transcriptVerifier)
	require.NoError(t, err)
	require.True(t, verified)
}
// TestRangeVerifyNotInRange checks that proving a random (large) value
// against a tiny range (n = 2) fails at prove time.
func TestRangeVerifyNotInRange(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	u := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: u,
	}
	transcript := merlin.NewTranscript("test")
	_, err = prover.Prove(v, gamma, n, proofGenerators, transcript)
	require.Error(t, err)
}
// TestRangeVerifyNonRandom proves and verifies the fixed value v = 1
// in the small range n = 2.
func TestRangeVerifyNonRandom(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.One()
	gamma := curve.Scalar.Random(crand.Reader)
	g := curve.Point.Random(crand.Reader)
	h := curve.Point.Random(crand.Reader)
	u := curve.Point.Random(crand.Reader)
	proofGenerators := RangeProofGenerators{
		g: g,
		h: h,
		u: u,
	}
	transcript := merlin.NewTranscript("test")
	proof, err := prover.Prove(v, gamma, n, proofGenerators, transcript)
	require.NoError(t, err)
	verifier, err := NewRangeVerifier(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	transcriptVerifier := merlin.NewTranscript("test")
	capV := getcapV(v, gamma, g, h)
	verified, err := verifier.Verify(proof, capV, proofGenerators, n, transcriptVerifier)
	require.NoError(t, err)
	require.True(t, verified)
}

14
crypto/core/README.md Executable file
View File

@ -0,0 +1,14 @@
---
aliases: [README]
tags: []
title: README
linter-yaml-title-alias: README
date created: Wednesday, April 17th 2024, 4:11:40 pm
date modified: Thursday, April 18th 2024, 8:19:25 am
---
## Core Package
The core package contains a set of primitives, including but not limited to various
elliptic curves, hashes, and commitment schemes. These primitives are used internally
and can also be used independently on their own externally.

115
crypto/core/commit.go Executable file
View File

@ -0,0 +1,115 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package core
import (
"crypto/hmac"
crand "crypto/rand"
"crypto/sha256"
"crypto/subtle"
"encoding/json"
"fmt"
"hash"
)
// Size of random values and hash outputs are determined by our hash function
const Size = sha256.Size
type (
	// Commitment to a given message which can be later revealed.
	// This is sent to and held by a verifier until the corresponding
	// witness is provided.
	Commitment []byte
	// Witness is sent to and opened by the verifier. This proves that
	// committed message hasn't been altered by later information.
	Witness struct {
		Msg []byte
		// r is the random nonce used as the HMAC key. It is unexported so
		// callers cannot tamper with it directly; JSON round-trips go
		// through witnessJSON below.
		r [Size]byte
	}
	// witnessJSON is used for un/marshaling; it mirrors Witness with the
	// nonce exported so encoding/json can see it.
	witnessJSON struct {
		Msg []byte
		R   [Size]byte
	}
)
// MarshalJSON encodes Witness in JSON
func (w Witness) MarshalJSON() ([]byte, error) {
	// Copy into the exported mirror struct so the private nonce is emitted.
	shadow := witnessJSON{Msg: w.Msg, R: w.r}
	return json.Marshal(shadow)
}
// UnmarshalJSON decodes JSON into a Witness struct
func (w *Witness) UnmarshalJSON(data []byte) error {
witness := &witnessJSON{}
err := json.Unmarshal(data, witness)
if err != nil {
return err
}
w.Msg = witness.Msg
w.r = witness.R
return nil
}
// Commit to a given message. Uses SHA256 as the hash function.
func Commit(msg []byte) (Commitment, *Witness, error) {
	// Start with a zero nonce, then fill it from crypto/rand.
	witness := Witness{Msg: msg}
	n, err := crand.Read(witness.r[:])
	if err != nil {
		return nil, nil, err
	}
	// Ensure we read all the bytes expected
	if n != Size {
		return nil, nil, fmt.Errorf("failed to read %v bytes from crypto/rand: received %v bytes", Size, n)
	}
	// Commitment = HMAC(Sha2, msg, key) with the nonce as the key.
	commitment, err := ComputeHMAC(sha256.New, msg, witness.r[:])
	if err != nil {
		return nil, nil, err
	}
	return commitment, &witness, nil
}
// Open a commitment and return true if the commitment/decommitment pair are valid.
// reference: spec.§2.4: Commitment Scheme
func Open(c Commitment, d Witness) (bool, error) {
	// Reject malformed commitments up front.
	if len(c) != Size {
		return false, fmt.Errorf("invalid commitment, wrong length. %v != %v", len(c), Size)
	}
	// Recompute HMAC(Sha2, msg, key) from the witness and compare in
	// constant time to avoid leaking how many prefix bytes matched.
	expected, err := ComputeHMAC(sha256.New, d.Msg, d.r[:])
	if err != nil {
		return false, err
	}
	return subtle.ConstantTimeCompare(expected, c) == 1, nil
}
// ComputeHMAC computes HMAC(hash_fn, msg, key)
// Takes in a hash function to use for HMAC
// Returns an error if f is nil, or if writing into the MAC fails or is short.
func ComputeHMAC(f func() hash.Hash, msg []byte, k []byte) ([]byte, error) {
	if f == nil {
		return nil, fmt.Errorf("hash function cannot be nil")
	}
	mac := hmac.New(f, k)
	w, err := mac.Write(msg)
	// Check the error before the byte count: on a failed write the count is
	// meaningless (the original tested the count first in an if/else-if).
	if err != nil {
		return nil, err
	}
	if w != len(msg) {
		return nil, fmt.Errorf("bytes written to hash doesn't match expected: %v != %v", w, len(msg))
	}
	return mac.Sum(nil), nil
}

374
crypto/core/commit_test.go Executable file
View File

@ -0,0 +1,374 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package core
import (
"bytes"
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
)
// An entry into our test table
type entry struct {
	// Input
	msg []byte
	// Result (actual, not expected) — populated once by init() below
	commit   Commitment
	decommit *Witness
	err      error
}
// Test inputs and placeholders for results that will be filled in
// during init()
var testResults = []entry{
	{[]byte("This is a test message"), nil, nil, nil},
	{[]byte("short msg"), nil, nil, nil},
	{[]byte("This input field is intentionally longer than the SHA256 block size to ensure that the entire message is processed"),
		nil, nil, nil},
	{[]byte{0xFB, 0x1A, 0x18, 0x47, 0x39, 0x3C, 0x9F, 0x45, 0x5F, 0x29, 0x4C, 0x51, 0x42, 0x30, 0xA6, 0xB9},
		nil, nil, nil},
	// msg = \epsilon (empty string)
	{[]byte{}, nil, nil, nil},
	// msg == nil
	{nil, nil, nil, nil},
}
// Run our inputs through commit and record the outputs.
// init runs once before any test in this package, so every test below can
// read the shared results without recomputing them.
func init() {
	for i := range testResults {
		entry := &testResults[i]
		entry.commit, entry.decommit, entry.err = Commit(entry.msg)
	}
}
// Computing commitments should never produce errors
func TestCommitWithoutErrors(t *testing.T) {
	for _, entry := range testResults {
		if entry.err != nil {
			t.Errorf("received Commit(%v): %v", entry.msg, entry.err)
		}
	}
}
// Commitments should be 256 bits == 32 bytes in length (SHA256 output size)
func TestCommitmentsAreExpectedLength(t *testing.T) {
	const expLen = 256 / 8
	for _, entry := range testResults {
		if len(entry.commit) != expLen {
			t.Errorf("commitment is not expected length: %v != %v", len(entry.commit), expLen)
		}
	}
}
// Decommit cannot be nil
func TestCommmitProducesDecommit(t *testing.T) {
	for _, entry := range testResults {
		if entry.decommit == nil {
			t.Errorf("decommit cannot be nil: Commit(%v)", entry.msg)
		}
	}
}
// Decommit value should contain the same message
func TestCommmitProducesDecommitWithSameMessage(t *testing.T) {
	for _, entry := range testResults {
		if !bytes.Equal(entry.msg, entry.decommit.Msg) {
			t.Errorf("decommit.msg != msg: %v != %v", entry.msg, entry.decommit.Msg)
		}
	}
}
// Commitments should be unique
func TestCommmitProducesDistinctCommitments(t *testing.T) {
	seen := make(map[[Size]byte]bool)
	// Check the pre-computed commitments for uniqueness
	for _, entry := range testResults {
		// Slices cannot be used as hash keys, so we need to copy into
		// an array. Oh, go-lang.
		var cee [Size]byte
		copy(cee[:], entry.commit)
		// Ensure each commit is unique
		if seen[cee] {
			t.Errorf("duplicate commit found: %v", cee)
		}
		seen[cee] = true
	}
}
// Commitments should be unique even for the same message since the nonce is
// randomly selected
func TestCommmitDistinctCommitments(t *testing.T) {
	seen := make(map[[Size]byte]bool)
	msg := []byte("black lives matter")
	const iterations = 1000
	// Commit the same message repeatedly; each must be distinct
	for i := 0; i < iterations; i++ {
		// Compute a commitment
		c, _, err := Commit(msg)
		if err != nil {
			t.Error(err)
		}
		// Slices cannot be used as hash keys, so copy into an array
		var cee [Size]byte
		copy(cee[:], []byte(c))
		// Ensure each commit is unique
		if seen[cee] {
			t.Errorf("duplicate commit found: %v", cee)
		}
		seen[cee] = true
	}
}
// Nonces must be 256 bits = 32 bytes
func TestCommmitNonceIsExpectedLength(t *testing.T) {
	const expLen = 256 / 8
	// Check the pre-computed nonces
	for _, entry := range testResults {
		if len(entry.decommit.r) != expLen {
			t.Errorf("nonce is not expected length: %v != %v", len(entry.decommit.r), expLen)
		}
	}
}
// Randomly selected nonces will be unique with overwhelming probability
func TestCommmitProducesDistinctNonces(t *testing.T) {
	seen := make(map[[Size]byte]bool)
	msg := []byte("black lives matter")
	const iterations = 1000
	// Commit the same message repeatedly; each nonce must be distinct
	for i := 0; i < iterations; i++ {
		// Compute a commitment
		_, dee, err := Commit(msg)
		if err != nil {
			t.Error(err)
		}
		// Ensure each nonce is unique
		if seen[dee.r] {
			t.Errorf("duplicate nonce found: %v", dee.r)
		}
		seen[dee.r] = true
	}
}
// Every commitment produced by Commit must open successfully
func TestOpenOnValidCommitments(t *testing.T) {
	for _, entry := range testResults {
		// Open each commitment
		ok, err := Open(entry.commit, *entry.decommit)
		// There should be no error
		if err != nil {
			t.Error(err)
		}
		// The commitments should verify
		if !ok {
			t.Errorf("commitment failed to open: %v", entry.msg)
		}
	}
}
// Opening must fail when a single bit of the nonce has been flipped
func TestOpenOnModifiedNonce(t *testing.T) {
	for _, entry := range testResults {
		dʹ := copyWitness(entry.decommit)
		// Modify the nonce
		dʹ.r[0] ^= 0x40
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// Opening must fail when the leading bytes of the nonce are zeroed out.
func TestOpenOnZeroPrefixNonce(t *testing.T) {
	for _, entry := range testResults {
		dʹ := copyWitness(entry.decommit)
		// Zero the first 11 bytes of the nonce
		for i := 0; i <= 10; i++ {
			dʹ.r[i] = 0x00
		}
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// copyWitness makes a deep copy of a Witness so tests can mutate it freely.
func copyWitness(d *Witness) *Witness {
	msgCopy := make([]byte, len(d.Msg))
	copy(msgCopy, d.Msg)
	out := &Witness{Msg: msgCopy}
	copy(out.r[:], d.r[:])
	return out
}
// Asserts that err == nil and ok == false, i.e. Open completed without
// error but correctly rejected the commitment/witness pair.
// (The previous comment wrongly said "err != nil".)
func assertFailedOpen(t *testing.T, ok bool, err error) {
	// There should be no error
	if err != nil {
		t.Error(err)
	}
	// But the commitments should fail
	if ok {
		t.Error("commitment was verified but was expected to fail")
	}
}
// An unrelated message should fail on open
func TestOpenOnNewMessage(t *testing.T) {
	for _, entry := range testResults {
		dʹ := copyWitness(entry.decommit)
		// Use a distinct message
		dʹ.Msg = []byte("no one expects the spanish inquisition")
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// An appended message should fail on open
func TestOpenOnAppendedMessage(t *testing.T) {
	for _, entry := range testResults {
		dʹ := copyWitness(entry.decommit)
		// Modify the message
		dʹ.Msg = []byte("no one expects the spanish inquisition")
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// A modified message should fail on open
func TestOpenOnModifiedMessage(t *testing.T) {
	for _, entry := range testResults {
		// Skip the empty string message for this test case
		if len(entry.msg) == 0 {
			continue
		}
		// Modify the message _in situ_ (flip one byte)
		dʹ := copyWitness(entry.decommit)
		dʹ.Msg[1] ^= 0x99
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// A modified commitment should fail on open
func TestOpenOnModifiedCommitment(t *testing.T) {
	for _, entry := range testResults {
		// Copy and then modify the commitment
		cʹ := make([]byte, Size)
		copy(cʹ[:], entry.commit)
		cʹ[6] ^= 0x33
		// Open and check for failure
		ok, err := Open(cʹ, *entry.decommit)
		assertFailedOpen(t, ok, err)
	}
}
// An empty decommit should fail to open
func TestOpenOnDefaultDecommitObject(t *testing.T) {
	for _, entry := range testResults {
		// Open and check for failure
		ok, err := Open(entry.commit, Witness{})
		assertFailedOpen(t, ok, err)
	}
}
// A nil commit should return an error (wrong length, not merely a failed open)
func TestOpenOnNilCommitment(t *testing.T) {
	_, err := Open(nil, Witness{})
	assertError(t, err)
}
// Verifies that err != nil
func assertError(t *testing.T, err error) {
	if err == nil {
		t.Error("expected an error but received nil")
	}
}
// Ill-formed commitment should produce an error
func TestOpenOnLongCommitment(t *testing.T) {
	tooLong := make([]byte, Size+1)
	_, err := Open(tooLong, Witness{})
	assertError(t, err)
}
// Ill-formed commitment should produce an error
func TestOpenOnShortCommitment(t *testing.T) {
	tooShort := make([]byte, Size-1)
	_, err := Open(tooShort, Witness{})
	assertError(t, err)
}
// Tests that marshal-unmarshal is the identity function for Witness,
// including the unexported nonce field.
func TestWitnessMarshalRoundTrip(t *testing.T) {
	expected := &Witness{
		[]byte("I'm the dude. So that's what you call me"),
		[Size]byte{0xAC},
	}
	// Marshal and test
	jsonBytes, err := json.Marshal(expected)
	require.NoError(t, err)
	require.NotNil(t, jsonBytes)
	// Unmarshal and test
	actual := &Witness{}
	require.NoError(t, json.Unmarshal(jsonBytes, actual))
	require.Equal(t, expected.Msg, actual.Msg)
	require.Equal(t, expected.r, actual.r)
}
// Tests that marshal-unmarshal is the identity function for Commitment
func TestCommitmentMarshalRoundTrip(t *testing.T) {
	expected := Commitment([]byte("That or uh his-dudeness or duder or el duderino."))
	// Marshal and test
	jsonBytes, err := json.Marshal(expected)
	require.NoError(t, err)
	require.NotNil(t, jsonBytes)
	// Unmarshal and test
	actual := Commitment{}
	require.NoError(t, json.Unmarshal(jsonBytes, &actual))
	require.Equal(t, []byte(expected), []byte(actual))
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

863
crypto/core/curves/curve.go Executable file
View File

@ -0,0 +1,863 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/elliptic"
"encoding/hex"
"encoding/json"
"fmt"
"hash"
"io"
"math/big"
"sync"
"github.com/onsonr/hway/crypto/core/curves/native/bls12381"
)
// Lazily-initialized singleton Curve instances; each sync.Once guards the
// one-time construction of its paired Curve value.
var (
	k256Initonce sync.Once
	k256         Curve
	bls12381g1Initonce sync.Once
	bls12381g1         Curve
	bls12381g2Initonce sync.Once
	bls12381g2         Curve
	bls12377g1Initonce sync.Once
	bls12377g1         Curve
	bls12377g2Initonce sync.Once
	bls12377g2         Curve
	p256Initonce sync.Once
	p256         Curve
	ed25519Initonce sync.Once
	ed25519         Curve
	pallasInitonce sync.Once
	pallas         Curve
)
// Canonical curve-name strings used for (un)marshaling and curve lookup.
const (
	K256Name       = "secp256k1"
	BLS12381G1Name = "BLS12381G1"
	BLS12381G2Name = "BLS12381G2"
	// NOTE(review): the value "BLS12831" looks transposed (cf. BLS12381 above)
	// — confirm whether serialized data depends on it before changing.
	BLS12831Name = "BLS12831"
	P256Name     = "P-256"
	ED25519Name  = "ed25519"
	PallasName   = "pallas"
	BLS12377G1Name = "BLS12377G1"
	BLS12377G2Name = "BLS12377G2"
	BLS12377Name   = "BLS12377"
)
// scalarBytes is the fixed canonical byte length of a scalar; the marshal
// helpers below assume every supported curve's scalar serializes to this size.
const scalarBytes = 32
// Scalar represents an element of the scalar field \mathbb{F}_q
// of the elliptic curve construction.
type Scalar interface {
	// Random returns a random scalar using the provided reader
	// to retrieve bytes
	Random(reader io.Reader) Scalar
	// Hash the specific bytes in a manner to yield a
	// uniformly distributed scalar
	Hash(bytes []byte) Scalar
	// Zero returns the additive identity element
	Zero() Scalar
	// One returns the multiplicative identity element
	One() Scalar
	// IsZero returns true if this element is the additive identity element
	IsZero() bool
	// IsOne returns true if this element is the multiplicative identity element
	IsOne() bool
	// IsOdd returns true if this element is odd
	IsOdd() bool
	// IsEven returns true if this element is even
	IsEven() bool
	// New returns an element with the value equal to `value`
	New(value int) Scalar
	// Cmp returns
	// -2 if this element is in a different field than rhs
	// -1 if this element is less than rhs
	// 0 if this element is equal to rhs
	// 1 if this element is greater than rhs
	Cmp(rhs Scalar) int
	// Square returns element*element
	Square() Scalar
	// Double returns element+element
	Double() Scalar
	// Invert returns element^-1 mod p
	Invert() (Scalar, error)
	// Sqrt computes the square root of this element if it exists.
	Sqrt() (Scalar, error)
	// Cube returns element*element*element
	Cube() Scalar
	// Add returns element+rhs
	Add(rhs Scalar) Scalar
	// Sub returns element-rhs
	Sub(rhs Scalar) Scalar
	// Mul returns element*rhs
	Mul(rhs Scalar) Scalar
	// MulAdd returns element * y + z mod p
	MulAdd(y, z Scalar) Scalar
	// Div returns element*rhs^-1 mod p
	Div(rhs Scalar) Scalar
	// Neg returns -element mod p
	Neg() Scalar
	// SetBigInt returns this element set to the value of v
	SetBigInt(v *big.Int) (Scalar, error)
	// BigInt returns this element as a big integer
	BigInt() *big.Int
	// Point returns the associated point for this scalar
	Point() Point
	// Bytes returns the canonical byte representation of this scalar
	Bytes() []byte
	// SetBytes creates a scalar from the canonical representation expecting the exact number of bytes needed to represent the scalar
	SetBytes(bytes []byte) (Scalar, error)
	// SetBytesWide creates a scalar expecting double the exact number of bytes needed to represent the scalar which is reduced by the modulus
	SetBytesWide(bytes []byte) (Scalar, error)
	// Clone returns a cloned Scalar of this value
	Clone() Scalar
}
// PairingScalar is a Scalar for pairing-friendly curves that can additionally
// be bound to a specific source group point via SetPoint.
type PairingScalar interface {
	Scalar
	SetPoint(p Point) PairingScalar
}
// unmarshalScalar splits serialized input of the form "<curve name>:<payload>"
// and resolves the named curve. Returns the curve and the raw payload bytes.
func unmarshalScalar(input []byte) (*Curve, []byte, error) {
	sep := byte(':')
	i := 0
	for ; i < len(input); i++ {
		if input[i] == sep {
			break
		}
	}
	// No separator means no payload; bail out instead of slicing past the
	// end of input below (input[i+1:] would panic when i == len(input)).
	if i == len(input) {
		return nil, nil, fmt.Errorf("unrecognized curve")
	}
	name := string(input[:i])
	curve := GetCurveByName(name)
	if curve == nil {
		return nil, nil, fmt.Errorf("unrecognized curve")
	}
	return curve, input[i+1:], nil
}
// scalarMarshalBinary serializes a scalar as "<curve name>:" followed by its
// 32 canonical value bytes.
func scalarMarshalBinary(scalar Scalar) ([]byte, error) {
	name := scalar.Point().CurveName()
	out := make([]byte, 0, len(name)+1+scalarBytes)
	out = append(out, name...)
	out = append(out, ':')
	out = append(out, scalar.Bytes()...)
	return out, nil
}
// scalarUnmarshalBinary parses "<curve name>:<32 value bytes>".
// (Previous comments described the layout backwards — the curve name comes
// first, then the separator, then the 32-byte scalar value.)
func scalarUnmarshalBinary(input []byte) (Scalar, error) {
	// Shortest valid input uses the shortest known curve name (P-256).
	if len(input) < scalarBytes+1+len(P256Name) {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	sc, data, err := unmarshalScalar(input)
	if err != nil {
		return nil, err
	}
	return sc.Scalar.SetBytes(data)
}
// scalarMarshalText serializes a scalar as the curve name, a ':'
// separator, and the hex encoding of the 32 scalar bytes. Hex is used
// (rather than base64) so strict/non-strict decoder differences cannot
// change the value.
func scalarMarshalText(scalar Scalar) ([]byte, error) {
	name := []byte(scalar.Point().CurveName())
	buf := make([]byte, len(name)+1+scalarBytes*2)
	n := copy(buf, name)
	buf[n] = ':'
	_ = hex.Encode(buf[n+1:], scalar.Bytes())
	return buf, nil
}
// scalarUnmarshalText parses the output of scalarMarshalText: the curve
// name, a ':' separator, then exactly scalarBytes*2 hex characters.
func scalarUnmarshalText(input []byte) (Scalar, error) {
	if len(input) < scalarBytes*2+len(P256Name)+1 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	curve, data, err := unmarshalScalar(input)
	if err != nil {
		return nil, err
	}
	// The hex payload must be exactly scalarBytes*2 characters: a longer
	// payload would overflow the fixed-size buffer below (hex.Decode
	// panics when dst is too small), and a shorter one would previously
	// be accepted and silently zero-padded.
	if len(data) != scalarBytes*2 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	var t [scalarBytes]byte
	if _, err = hex.Decode(t[:], data); err != nil {
		return nil, err
	}
	return curve.Scalar.SetBytes(t[:])
}
// scalarMarshalJson serializes a scalar as
// {"type": <curve name>, "value": <hex scalar bytes>}.
func scalarMarshalJson(scalar Scalar) ([]byte, error) {
	return json.Marshal(map[string]string{
		"type":  scalar.Point().CurveName(),
		"value": hex.EncodeToString(scalar.Bytes()),
	})
}
// scalarUnmarshalJson parses the output of scalarMarshalJson:
// {"type": <curve name>, "value": <hex scalar bytes>}.
func scalarUnmarshalJson(input []byte) (Scalar, error) {
	var fields map[string]string
	if err := json.Unmarshal(input, &fields); err != nil {
		return nil, err
	}
	curve := GetCurveByName(fields["type"])
	if curve == nil {
		return nil, fmt.Errorf("invalid type")
	}
	raw, err := hex.DecodeString(fields["value"])
	if err != nil {
		return nil, err
	}
	return curve.Scalar.SetBytes(raw)
}
// Point represents an elliptic curve point
type Point interface {
	// Random returns a random point using entropy from reader.
	Random(reader io.Reader) Point
	// Hash maps the input bytes to a point (hash-to-curve).
	Hash(bytes []byte) Point
	// Identity returns the additive identity (point at infinity).
	Identity() Point
	// Generator returns the curve's base point.
	Generator() Point
	// IsIdentity returns true if this point is the identity element.
	IsIdentity() bool
	// IsNegative reports whether this point is considered "negative";
	// the exact sign convention is implementation-defined.
	IsNegative() bool
	// IsOnCurve returns true if this point satisfies the curve equation.
	IsOnCurve() bool
	// Double returns this point added to itself.
	Double() Point
	// Scalar returns a scalar in this point's associated scalar field.
	Scalar() Scalar
	// Neg returns the additive inverse of this point.
	Neg() Point
	// Add returns this point + rhs.
	Add(rhs Point) Point
	// Sub returns this point - rhs.
	Sub(rhs Point) Point
	// Mul returns this point multiplied by the scalar rhs.
	Mul(rhs Scalar) Point
	// Equal returns true if this point equals rhs.
	Equal(rhs Point) bool
	// Set sets this point to the affine coordinates (x, y).
	Set(x, y *big.Int) (Point, error)
	// ToAffineCompressed returns the compressed affine encoding.
	ToAffineCompressed() []byte
	// ToAffineUncompressed returns the uncompressed affine encoding.
	ToAffineUncompressed() []byte
	// FromAffineCompressed decodes a compressed affine encoding.
	FromAffineCompressed(bytes []byte) (Point, error)
	// FromAffineUncompressed decodes an uncompressed affine encoding.
	FromAffineUncompressed(bytes []byte) (Point, error)
	// CurveName returns the name of the curve this point belongs to.
	CurveName() string
	// SumOfProducts computes the multi-scalar product
	// points[0]*scalars[0] + ... + points[n-1]*scalars[n-1].
	SumOfProducts(points []Point, scalars []Scalar) Point
}
// PairingPoint extends Point for pairing-friendly curves.
type PairingPoint interface {
	Point
	// OtherGroup returns the corresponding point type in the other
	// pairing group (G1 <-> G2).
	OtherGroup() PairingPoint
	// Pairing computes the pairing of this point with rhs.
	Pairing(rhs PairingPoint) Scalar
	// MultiPairing computes the product of pairings over the given
	// (alternating) point arguments.
	MultiPairing(...PairingPoint) Scalar
}
// pointMarshalBinary serializes a point as the curve name, a ':'
// separator, then the point in compressed affine form.
func pointMarshalBinary(point Point) ([]byte, error) {
	compressed := point.ToAffineCompressed()
	name := point.CurveName()
	buf := make([]byte, 0, len(name)+1+len(compressed))
	buf = append(buf, name...)
	buf = append(buf, ':')
	buf = append(buf, compressed...)
	return buf, nil
}
// pointUnmarshalBinary parses the output of pointMarshalBinary: the
// curve name, a ':' separator, then the compressed point bytes.
func pointUnmarshalBinary(input []byte) (Point, error) {
	if len(input) < scalarBytes+1+len(P256Name) {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	split := len(input)
	for idx, b := range input {
		if b == ':' {
			split = idx
			break
		}
	}
	curve := GetCurveByName(string(input[:split]))
	if curve == nil {
		return nil, fmt.Errorf("unrecognized curve")
	}
	return curve.Point.FromAffineCompressed(input[split+1:])
}
// pointMarshalText serializes a point as the curve name, a ':'
// separator, then the hex encoding of the compressed point.
func pointMarshalText(point Point) ([]byte, error) {
	compressed := point.ToAffineCompressed()
	name := []byte(point.CurveName())
	buf := make([]byte, len(name)+1+len(compressed)*2)
	n := copy(buf, name)
	buf[n] = ':'
	hex.Encode(buf[n+1:], compressed)
	return buf, nil
}
// pointUnmarshalText parses the output of pointMarshalText: the curve
// name, a ':' separator, then the hex encoding of the compressed point.
func pointUnmarshalText(input []byte) (Point, error) {
	if len(input) < scalarBytes*2+1+len(P256Name) {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	split := len(input)
	for idx, b := range input {
		if b == ':' {
			split = idx
			break
		}
	}
	curve := GetCurveByName(string(input[:split]))
	if curve == nil {
		return nil, fmt.Errorf("unrecognized curve")
	}
	// (len(input)-split)/2 == ceil(payload/2) bytes, enough for hex.Decode.
	buffer := make([]byte, (len(input)-split)/2)
	if _, err := hex.Decode(buffer, input[split+1:]); err != nil {
		return nil, err
	}
	return curve.Point.FromAffineCompressed(buffer)
}
// pointMarshalJson serializes a point as
// {"type": <curve name>, "value": <hex compressed point>}.
func pointMarshalJson(point Point) ([]byte, error) {
	return json.Marshal(map[string]string{
		"type":  point.CurveName(),
		"value": hex.EncodeToString(point.ToAffineCompressed()),
	})
}
// pointUnmarshalJson parses the output of pointMarshalJson:
// {"type": <curve name>, "value": <hex compressed point>}.
func pointUnmarshalJson(input []byte) (Point, error) {
	var fields map[string]string
	if err := json.Unmarshal(input, &fields); err != nil {
		return nil, err
	}
	curve := GetCurveByName(fields["type"])
	if curve == nil {
		return nil, fmt.Errorf("invalid type")
	}
	raw, err := hex.DecodeString(fields["value"])
	if err != nil {
		return nil, err
	}
	return curve.Point.FromAffineCompressed(raw)
}
// Curve represents a named elliptic curve with a scalar field and point group
type Curve struct {
	// Scalar is a prototype scalar in the curve's scalar field; use it
	// to construct new scalars (e.g. Zero, SetBytes).
	Scalar Scalar
	// Point is a prototype point on the curve; use it to construct new
	// points (e.g. Generator, FromAffineCompressed).
	Point Point
	// Name is the registered curve name (see GetCurveByName).
	Name string
}
// ScalarBaseMult multiplies the curve generator by sc.
func (c Curve) ScalarBaseMult(sc Scalar) Point {
	return c.Point.Generator().Mul(sc)
}
// NewGeneratorPoint returns the curve's base point.
func (c Curve) NewGeneratorPoint() Point {
	return c.Point.Generator()
}
// NewIdentityPoint returns the curve's identity point (point at infinity).
func (c Curve) NewIdentityPoint() Point {
	return c.Point.Identity()
}
// NewScalar returns the zero scalar of the curve's scalar field.
func (c Curve) NewScalar() Scalar {
	return c.Scalar.Zero()
}
// ToEllipticCurve returns the equivalent of this curve as the go interface `elliptic.Curve`
// Only secp256k1 and NIST P-256 have elliptic.Curve equivalents here;
// every other registered curve returns an error.
func (c Curve) ToEllipticCurve() (elliptic.Curve, error) {
	switch c.Name {
	case K256Name:
		return K256Curve(), nil
	case P256Name:
		return NistP256Curve(), nil
	default:
		return nil, fmt.Errorf("can't convert %s", c.Name)
	}
}
// PairingCurve represents a named elliptic curve
// that supports pairings
type PairingCurve struct {
	// Scalar is a prototype scalar of the pairing curve's scalar field.
	Scalar PairingScalar
	// PointG1 is a prototype point in the G1 group.
	PointG1 PairingPoint
	// PointG2 is a prototype point in the G2 group.
	PointG2 PairingPoint
	// GT is a prototype element of the pairing target group.
	GT Scalar
	// Name is the registered curve name (see GetPairingCurveByName).
	Name string
}
// ScalarG1BaseMult multiplies the G1 generator by sc.
func (c PairingCurve) ScalarG1BaseMult(sc Scalar) PairingPoint {
	return c.PointG1.Generator().Mul(sc).(PairingPoint)
}
// ScalarG2BaseMult multiplies the G2 generator by sc.
func (c PairingCurve) ScalarG2BaseMult(sc Scalar) PairingPoint {
	return c.PointG2.Generator().Mul(sc).(PairingPoint)
}
// NewG1GeneratorPoint returns the G1 base point.
func (c PairingCurve) NewG1GeneratorPoint() PairingPoint {
	return c.PointG1.Generator().(PairingPoint)
}
// NewG2GeneratorPoint returns the G2 base point.
func (c PairingCurve) NewG2GeneratorPoint() PairingPoint {
	return c.PointG2.Generator().(PairingPoint)
}
// NewG1IdentityPoint returns the G1 identity point.
func (c PairingCurve) NewG1IdentityPoint() PairingPoint {
	return c.PointG1.Identity().(PairingPoint)
}
// NewG2IdentityPoint returns the G2 identity point.
func (c PairingCurve) NewG2IdentityPoint() PairingPoint {
	return c.PointG2.Identity().(PairingPoint)
}
// NewScalar returns the zero scalar of the pairing curve's scalar field.
func (c PairingCurve) NewScalar() PairingScalar {
	return c.Scalar.Zero().(PairingScalar)
}
// GetCurveByName returns the correct `Curve` given the name
func GetCurveByName(name string) *Curve {
	switch name {
	case K256Name:
		return K256()
	case P256Name:
		return P256()
	case ED25519Name:
		return ED25519()
	case PallasName:
		return PALLAS()
	// The bare pairing-curve names default to the G1 group.
	case BLS12381G1Name, BLS12831Name:
		return BLS12381G1()
	case BLS12381G2Name:
		return BLS12381G2()
	case BLS12377G1Name, BLS12377Name:
		return BLS12377G1()
	case BLS12377G2Name:
		return BLS12377G2()
	default:
		return nil
	}
}
// GetPairingCurveByName returns the pairing curve registered under name,
// or nil if the name does not identify a pairing-capable curve. The bare
// BLS12-381 name defaults to the G1 group.
func GetPairingCurveByName(name string) *PairingCurve {
	switch name {
	case BLS12381G1Name, BLS12831Name:
		return BLS12381(BLS12381G1().NewIdentityPoint())
	case BLS12381G2Name:
		return BLS12381(BLS12381G2().NewIdentityPoint())
	default:
		return nil
	}
}
// BLS12381G1 returns the BLS12-381 curve with points in G1
func BLS12381G1() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	bls12381g1Initonce.Do(bls12381g1Init)
	return &bls12381g1
}
// bls12381g1Init populates the bls12381g1 singleton.
func bls12381g1Init() {
	bls12381g1 = Curve{
		Scalar: &ScalarBls12381{
			Value: bls12381.Bls12381FqNew(),
			point: new(PointBls12381G1),
		},
		Point: new(PointBls12381G1).Identity(),
		Name: BLS12381G1Name,
	}
}
// BLS12381G2 returns the BLS12-381 curve with points in G2
func BLS12381G2() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	bls12381g2Initonce.Do(bls12381g2Init)
	return &bls12381g2
}
// bls12381g2Init populates the bls12381g2 singleton.
func bls12381g2Init() {
	bls12381g2 = Curve{
		Scalar: &ScalarBls12381{
			Value: bls12381.Bls12381FqNew(),
			point: new(PointBls12381G2),
		},
		Point: new(PointBls12381G2).Identity(),
		Name: BLS12381G2Name,
	}
}
// BLS12381 returns the BLS12-381 pairing curve. preferredPoint selects
// the group point implementation the returned curve's Scalar is
// associated with (G1 or G2).
func BLS12381(preferredPoint Point) *PairingCurve {
	return &PairingCurve{
		Scalar: &ScalarBls12381{
			Value: bls12381.Bls12381FqNew(),
			point: preferredPoint,
		},
		PointG1: &PointBls12381G1{
			Value: new(bls12381.G1).Identity(),
		},
		PointG2: &PointBls12381G2{
			Value: new(bls12381.G2).Identity(),
		},
		GT: &ScalarBls12381Gt{
			Value: new(bls12381.Gt).SetOne(),
		},
		Name: BLS12831Name,
	}
}
// BLS12377G1 returns the BLS12-377 curve with points in G1
func BLS12377G1() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	bls12377g1Initonce.Do(bls12377g1Init)
	return &bls12377g1
}
// bls12377g1Init populates the bls12377g1 singleton.
func bls12377g1Init() {
	bls12377g1 = Curve{
		Scalar: &ScalarBls12377{
			value: new(big.Int),
			point: new(PointBls12377G1),
		},
		Point: new(PointBls12377G1).Identity(),
		Name: BLS12377G1Name,
	}
}
// BLS12377G2 returns the BLS12-377 curve with points in G2
func BLS12377G2() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	bls12377g2Initonce.Do(bls12377g2Init)
	return &bls12377g2
}
// bls12377g2Init populates the bls12377g2 singleton.
func bls12377g2Init() {
	bls12377g2 = Curve{
		Scalar: &ScalarBls12377{
			value: new(big.Int),
			point: new(PointBls12377G2),
		},
		Point: new(PointBls12377G2).Identity(),
		Name: BLS12377G2Name,
	}
}
// K256 returns the secp256k1 curve
func K256() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	k256Initonce.Do(k256Init)
	return &k256
}
// k256Init populates the k256 singleton.
func k256Init() {
	k256 = Curve{
		Scalar: new(ScalarK256).Zero(),
		Point: new(PointK256).Identity(),
		Name: K256Name,
	}
}
// P256 returns the NIST P-256 curve.
func P256() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	p256Initonce.Do(p256Init)
	return &p256
}
// p256Init populates the p256 singleton.
func p256Init() {
	p256 = Curve{
		Scalar: new(ScalarP256).Zero(),
		Point: new(PointP256).Identity(),
		Name: P256Name,
	}
}
// ED25519 returns the Ed25519 curve.
func ED25519() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	ed25519Initonce.Do(ed25519Init)
	return &ed25519
}
// ed25519Init populates the ed25519 singleton.
func ed25519Init() {
	ed25519 = Curve{
		Scalar: new(ScalarEd25519).Zero(),
		Point: new(PointEd25519).Identity(),
		Name: ED25519Name,
	}
}
// PALLAS returns the Pallas curve.
func PALLAS() *Curve {
	// Lazily initialize the package-level singleton exactly once.
	pallasInitonce.Do(pallasInit)
	return &pallas
}
// pallasInit populates the pallas singleton.
func pallasInit() {
	pallas = Curve{
		Scalar: new(ScalarPallas).Zero(),
		Point: new(PointPallas).Identity(),
		Name: PallasName,
	}
}
// https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#appendix-G.2.1
// osswu3mod4 maps a field element u to an affine point (x, y) using the
// simplified SWU method for curves over fields with p ≡ 3 mod 4.
// The sswuParams supply the curve constants A, B, the non-square Z, and
// the precomputed exponents C1, C2.
func osswu3mod4(u *big.Int, p *sswuParams) (x, y *big.Int) {
	params := p.Params
	field := NewField(p.Params.P)
	tv1 := field.NewElement(u)
	tv1 = tv1.Mul(tv1) // tv1 = u^2
	tv3 := field.NewElement(p.Z).Mul(tv1) // tv3 = Z * tv1
	tv2 := tv3.Mul(tv3) // tv2 = tv3^2
	xd := tv2.Add(tv3) // xd = tv2 + tv3
	x1n := xd.Add(field.One()) // x1n = (xd + 1)
	x1n = x1n.Mul(field.NewElement(p.B)) // x1n * B
	aNeg := field.NewElement(p.A).Neg()
	xd = xd.Mul(aNeg) // xd = -A * xd
	// Exceptional case: if the denominator vanished, substitute Z * A.
	if xd.Value.Cmp(big.NewInt(0)) == 0 {
		xd = field.NewElement(p.Z).Mul(field.NewElement(p.A)) // xd = Z * A
	}
	tv2 = xd.Mul(xd) // tv2 = xd^2
	gxd := tv2.Mul(xd) // gxd = tv2 * xd
	tv2 = tv2.Mul(field.NewElement(p.A)) // tv2 = A * tv2
	gx1 := x1n.Mul(x1n) // gx1 = x1n^2
	gx1 = gx1.Add(tv2) // gx1 = gx1 + tv2
	gx1 = gx1.Mul(x1n) // gx1 = gx1 * x1n
	tv2 = gxd.Mul(field.NewElement(p.B)) // tv2 = B * gxd
	gx1 = gx1.Add(tv2) // gx1 = gx1 + tv2
	tv4 := gxd.Mul(gxd) // tv4 = gxd^2
	tv2 = gx1.Mul(gxd) // tv2 = gx1 * gxd
	tv4 = tv4.Mul(tv2) // tv4 = tv4 * tv2
	// Candidate square root y1 = (gx1 * gxd)^C1 * tv2 (C1 = (p-3)/4 variant exponent).
	y1 := tv4.Pow(field.NewElement(p.C1))
	y1 = y1.Mul(tv2) // y1 = y1 * tv2
	x2n := tv3.Mul(x1n) // x2n = tv3 * x1n
	y2 := y1.Mul(field.NewElement(p.C2)) // y2 = y1 * c2
	y2 = y2.Mul(tv1) // y2 = y2 * tv1
	y2 = y2.Mul(field.NewElement(u)) // y2 = y2 * u
	tv2 = y1.Mul(y1) // tv2 = y1^2
	tv2 = tv2.Mul(gxd) // tv2 = tv2 * gxd
	// e2 is true when gx1 turned out to be a square, i.e. (x1, y1) is valid.
	e2 := tv2.Value.Cmp(gx1.Value) == 0
	// If e2, x = x1, else x = x2
	if e2 {
		x = x1n.Value
	} else {
		x = x2n.Value
	}
	// xn / xd
	// NOTE(review): ModInverse is not constant-time; confirm that is
	// acceptable for the callers of this map.
	x.Mul(x, new(big.Int).ModInverse(xd.Value, params.P))
	x.Mod(x, params.P)
	// If e2, y = y1, else y = y2
	if e2 {
		y = y1.Value
	} else {
		y = y2.Value
	}
	// NOTE(review): big.Int.Bytes() returns an empty slice for zero, so
	// the indexing below panics when u == 0 or y == 0 — confirm callers
	// can never supply such values.
	uBytes := u.Bytes()
	yBytes := y.Bytes()
	// Parity (sign) of a big-endian value is the low bit of its last byte.
	usign := uBytes[len(uBytes)-1] & 1
	ysign := yBytes[len(yBytes)-1] & 1
	// Fix sign of y
	if usign != ysign {
		y.Neg(y)
		y.Mod(y, params.P)
	}
	return
}
func expandMsgXmd(h hash.Hash, msg, domain []byte, outLen int) ([]byte, error) {
domainLen := uint8(len(domain))
if domainLen > 255 {
return nil, fmt.Errorf("invalid domain length")
}
// DST_prime = DST || I2OSP(len(DST), 1)
// b_0 = H(Z_pad || msg || l_i_b_str || I2OSP(0, 1) || DST_prime)
_, _ = h.Write(make([]byte, h.BlockSize()))
_, _ = h.Write(msg)
_, _ = h.Write([]byte{uint8(outLen >> 8), uint8(outLen)})
_, _ = h.Write([]byte{0})
_, _ = h.Write(domain)
_, _ = h.Write([]byte{domainLen})
b0 := h.Sum(nil)
// b_1 = H(b_0 || I2OSP(1, 1) || DST_prime)
h.Reset()
_, _ = h.Write(b0)
_, _ = h.Write([]byte{1})
_, _ = h.Write(domain)
_, _ = h.Write([]byte{domainLen})
b1 := h.Sum(nil)
// b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime)
ell := (outLen + h.Size() - 1) / h.Size()
bi := b1
out := make([]byte, outLen)
for i := 1; i < ell; i++ {
h.Reset()
// b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime)
tmp := make([]byte, h.Size())
for j := 0; j < h.Size(); j++ {
tmp[j] = b0[j] ^ bi[j]
}
_, _ = h.Write(tmp)
_, _ = h.Write([]byte{1 + uint8(i)})
_, _ = h.Write(domain)
_, _ = h.Write([]byte{domainLen})
// b_1 || ... || b_(ell - 1)
copy(out[(i-1)*h.Size():i*h.Size()], bi[:])
bi = h.Sum(nil)
}
// b_ell
copy(out[(ell-1)*h.Size():], bi[:])
return out[:outLen], nil
}
func bhex(s string) *big.Int {
r, _ := new(big.Int).SetString(s, 16)
return r
}
// sswuParams holds the constants needed by the simplified SWU map
// (osswu3mod4): the curve parameters, the Weierstrass constants A and B,
// the non-square Z, and the precomputed exponents C1 and C2.
type sswuParams struct {
	Params *elliptic.CurveParams
	C1, C2, A, B, Z *big.Int
}
// sumOfProductsPippenger implements a version of Pippenger's algorithm.
//
// The algorithm works as follows:
//
// Let `n` be a number of point-scalar pairs.
// Let `w` be a window of bits (6..8, chosen based on `n`, see cost factor).
//
// 1. Prepare `2^(w-1) - 1` buckets with indices `[1..2^(w-1))` initialized with identity points.
// Bucket 0 is not needed as it would contain points multiplied by 0.
// 2. Convert scalars to a radix-`2^w` representation with signed digits in `[-2^w/2, 2^w/2]`.
// Note: only the last digit may equal `2^w/2`.
// 3. Starting with the last window, for each point `i=[0..n)` add it to a a bucket indexed by
// the point's scalar's value in the window.
// 4. Once all points in a window are sorted into buckets, add buckets by multiplying each
// by their index. Efficient way of doing it is to start with the last bucket and compute two sums:
// intermediate sum from the last to the first, and the full sum made of all intermediate sums.
// 5. Shift the resulting sum of buckets by `w` bits by using `w` doublings.
// 6. Add to the return value.
// 7. Repeat the loop.
//
// Approximate cost w/o wNAF optimizations (A = addition, D = doubling):
//
// ```ascii
// cost = (n*A + 2*(2^w/2)*A + w*D + A)*256/w
//
// | | | | |
// | | | | looping over 256/w windows
// | | | adding to the result
// sorting points | shifting the sum by w bits (to the next window, starting from last window)
// one by one |
// into buckets adding/subtracting all buckets
// multiplied by their indexes
// using a sum of intermediate sums
//
// ```
//
// For large `n`, dominant factor is (n*256/w) additions.
// However, if `w` is too big and `n` is not too big, then `(2^w/2)*A` could dominate.
// Therefore, the optimal choice of `w` grows slowly as `n` grows.
//
// # For constant time we use a fixed window of 6
//
// This algorithm is adapted from section 4 of <https://eprint.iacr.org/2012/549.pdf>.
// and https://cacr.uwaterloo.ca/techreports/2010/cacr2010-26.pdf
func sumOfProductsPippenger(points []Point, scalars []*big.Int) Point {
	// Mismatched inputs cannot be paired up; signal the error with nil.
	if len(points) != len(scalars) {
		return nil
	}
	const w = 6
	// bucketSize = 2^w - 1 also serves as the w-bit digit mask below.
	bucketSize := (1 << w) - 1
	// One window per w-bit digit position of a (up to) 256-bit scalar.
	windows := make([]Point, 255/w+1)
	for i := range windows {
		windows[i] = points[0].Identity()
	}
	bucket := make([]Point, bucketSize)
	for j := 0; j < len(windows); j++ {
		for i := 0; i < bucketSize; i++ {
			bucket[i] = points[0].Identity()
		}
		// NOTE(review): the comment block above describes signed digits,
		// but this extracts plain unsigned w-bit windows — confirm that
		// is the intended variant.
		for i := 0; i < len(scalars); i++ {
			index := bucketSize & int(new(big.Int).Rsh(scalars[i], uint(w*j)).Int64())
			if index != 0 {
				bucket[index-1] = bucket[index-1].Add(points[i])
			}
		}
		// Sum buckets weighted by their index using running sums:
		// acc += sum_{k=i..max} bucket[k] for each i, which equals
		// sum_i (i+1) * bucket[i].
		acc, sum := windows[j].Identity(), windows[j].Identity()
		for i := bucketSize - 1; i >= 0; i-- {
			sum = sum.Add(bucket[i])
			acc = acc.Add(sum)
		}
		windows[j] = acc
	}
	// Combine windows from most- to least-significant, shifting by w
	// bits (w doublings) between each.
	acc := windows[0].Identity()
	for i := len(windows) - 1; i >= 0; i-- {
		for j := 0; j < w; j++ {
			acc = acc.Double()
		}
		acc = acc.Add(windows[i])
	}
	return acc
}

256
crypto/core/curves/ec_point.go Executable file
View File

@ -0,0 +1,256 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/elliptic"
"encoding/json"
"fmt"
"math/big"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/onsonr/hway/crypto/core"
"github.com/onsonr/hway/crypto/internal"
)
// curveNameToId maps a supported curve's canonical name to the one-byte
// identifier used by EcPoint's binary serialization.
var curveNameToId = map[string]byte{
	"secp256k1": 0,
	"P-224": 1,
	"P-256": 2,
	"P-384": 3,
	"P-521": 4,
}
// curveIdToName maps a serialized curve identifier back to a constructor
// for the corresponding elliptic.Curve.
var curveIdToName = map[byte]func() elliptic.Curve{
	0: func() elliptic.Curve { return btcec.S256() },
	1: elliptic.P224,
	2: elliptic.P256,
	3: elliptic.P384,
	4: elliptic.P521,
}
// curveMapper maps a curve name (as found in JSON) to a constructor for
// the corresponding elliptic.Curve.
var curveMapper = map[string]func() elliptic.Curve{
	"secp256k1": func() elliptic.Curve { return btcec.S256() },
	"P-224": elliptic.P224,
	"P-256": elliptic.P256,
	"P-384": elliptic.P384,
	"P-521": elliptic.P521,
}
// EcPoint represents an elliptic curve Point
type EcPoint struct {
	// Curve is the curve this point lies on.
	Curve elliptic.Curve
	// X, Y are the affine coordinates; (0, 0) encodes the identity here.
	X, Y *big.Int
}
// EcPointJson encapsulates the data that is serialized to JSON
// used internally and not for external use. Public so other pieces
// can use for serialization
type EcPointJson struct {
	// CurveName is the canonical name, resolvable via curveMapper.
	CurveName string
	// X, Y are the affine coordinates.
	X, Y *big.Int
}
// MarshalJSON implements json.Marshaler by encoding the point and its
// curve name through the EcPointJson wrapper.
func (a EcPoint) MarshalJSON() ([]byte, error) {
	wrapper := EcPointJson{
		CurveName: a.Curve.Params().Name,
		X:         a.X,
		Y:         a.Y,
	}
	return json.Marshal(wrapper)
}
// UnmarshalJSON implements json.Unmarshaler: it decodes an EcPointJson
// wrapper and resolves the named curve via curveMapper.
func (a *EcPoint) UnmarshalJSON(bytes []byte) error {
	var data EcPointJson
	if err := json.Unmarshal(bytes, &data); err != nil {
		return err
	}
	mapper, ok := curveMapper[data.CurveName]
	if !ok {
		return fmt.Errorf("unknown curve deserialized")
	}
	a.Curve = mapper()
	a.X = data.X
	a.Y = data.Y
	return nil
}
// MarshalBinary serializes the point as a one-byte curve identifier
// followed by X and Y, each left-padded to 32 bytes.
func (a *EcPoint) MarshalBinary() ([]byte, error) {
	code, ok := curveNameToId[a.Curve.Params().Name]
	if !ok {
		return nil, fmt.Errorf("unknown curve serialized")
	}
	// Guard: FillBytes panics when a coordinate does not fit in 32
	// bytes, which happens for P-384 and P-521 points even though those
	// curves are registered in curveNameToId. Fail with an error
	// instead of panicking.
	if a.X.BitLen() > 256 || a.Y.BitLen() > 256 {
		return nil, fmt.Errorf("coordinate does not fit in 32 bytes")
	}
	result := [65]byte{}
	result[0] = code
	a.X.FillBytes(result[1:33])
	a.Y.FillBytes(result[33:65])
	return result[:], nil
}
// UnmarshalBinary parses the output of MarshalBinary: a one-byte curve
// identifier followed by 32 X bytes and 32 Y bytes.
func (a *EcPoint) UnmarshalBinary(data []byte) error {
	// Guard: the previous code indexed data unconditionally and panicked
	// on inputs shorter than the fixed 65-byte format.
	if len(data) < 65 {
		return fmt.Errorf("invalid byte sequence")
	}
	mapper, ok := curveIdToName[data[0]]
	if !ok {
		return fmt.Errorf("unknown curve deserialized")
	}
	a.Curve = mapper()
	a.X = new(big.Int).SetBytes(data[1:33])
	a.Y = new(big.Int).SetBytes(data[33:65])
	return nil
}
// IsValid returns true when the point either satisfies the curve
// equation or is the (0, 0) identity encoding used by this package.
func (a EcPoint) IsValid() bool {
	return a.IsOnCurve() || a.IsIdentity()
}
// IsOnCurve returns true when (X, Y) satisfies the curve equation.
func (a EcPoint) IsOnCurve() bool {
	return a.Curve.IsOnCurve(a.X, a.Y)
}
// IsIdentity returns true if this Point is the Point at infinity
// (represented here as X = Y = 0); the comparison is constant-time.
func (a EcPoint) IsIdentity() bool {
	x := core.ConstantTimeEqByte(a.X, core.Zero)
	y := core.ConstantTimeEqByte(a.Y, core.Zero)
	return (x & y) == 1
}
// Equals return true if a + b have the same x,y coordinates
// Points on different curves (per sameCurve) are never equal; the
// coordinate comparison itself is constant-time.
func (a EcPoint) Equals(b *EcPoint) bool {
	if !sameCurve(&a, b) {
		return false
	}
	// Next, compare coords to determine equality
	x := core.ConstantTimeEqByte(a.X, b.X)
	y := core.ConstantTimeEqByte(a.Y, b.Y)
	return (x & y) == 1
}
// IsBasePoint returns true if this Point is curve's base Point
// The comparison against the generator coordinates is constant-time.
func (a EcPoint) IsBasePoint() bool {
	p := a.Curve.Params()
	x := core.ConstantTimeEqByte(a.X, p.Gx)
	y := core.ConstantTimeEqByte(a.Y, p.Gy)
	return (x & y) == 1
}
// Normalizes the Scalar to a positive element smaller than the base Point order.
func reduceModN(curve elliptic.Curve, k *big.Int) *big.Int {
return new(big.Int).Mod(k, curve.Params().N)
}
// Add performs elliptic curve addition on two points, returning an
// error for nil arguments, mismatched curves, or an invalid result.
func (a *EcPoint) Add(b *EcPoint) (*EcPoint, error) {
	if a == nil || b == nil {
		return nil, internal.ErrNilArguments
	}
	// Points from distinct curves cannot be added.
	if !sameCurve(a, b) {
		return nil, internal.ErrPointsDistinctCurves
	}
	sumX, sumY := a.Curve.Add(a.X, a.Y, b.X, b.Y)
	sum := &EcPoint{Curve: a.Curve, X: sumX, Y: sumY}
	if !sum.IsValid() {
		return nil, internal.ErrNotOnCurve
	}
	return sum, nil
}
// Neg returns the negation of a Weierstrass point: same x, with
// y replaced by p - y.
func (a *EcPoint) Neg() (*EcPoint, error) {
	if a == nil {
		return nil, internal.ErrNilArguments
	}
	negY := new(big.Int).Sub(a.Curve.Params().P, a.Y)
	result := &EcPoint{Curve: a.Curve, X: a.X, Y: negY}
	if !result.IsValid() {
		return nil, internal.ErrNotOnCurve
	}
	return result, nil
}
// ScalarMult multiplies this point by the scalar k; k is first reduced
// modulo the group order.
func (a *EcPoint) ScalarMult(k *big.Int) (*EcPoint, error) {
	if a == nil || k == nil {
		return nil, fmt.Errorf("cannot multiply nil Point or element")
	}
	reduced := reduceModN(a.Curve, k)
	prodX, prodY := a.Curve.ScalarMult(a.X, a.Y, reduced.Bytes())
	product := &EcPoint{Curve: a.Curve, X: prodX, Y: prodY}
	if !product.IsValid() {
		return nil, fmt.Errorf("result not on the curve")
	}
	return product, nil
}
// NewScalarBaseMult creates a point by multiplying the curve's base
// point by k; k is first reduced modulo the group order.
func NewScalarBaseMult(curve elliptic.Curve, k *big.Int) (*EcPoint, error) {
	if curve == nil || k == nil {
		return nil, fmt.Errorf("nil parameters are not supported")
	}
	reduced := reduceModN(curve, k)
	baseX, baseY := curve.ScalarBaseMult(reduced.Bytes())
	point := &EcPoint{Curve: curve, X: baseX, Y: baseY}
	if !point.IsValid() {
		return nil, fmt.Errorf("result not on the curve")
	}
	return point, nil
}
// Bytes returns the bytes represented by this Point with x || y
// Each coordinate is left-padded with zeros to the curve's field size.
func (a EcPoint) Bytes() []byte {
	fieldSize := internal.CalcFieldSize(a.Curve)
	out := make([]byte, fieldSize*2)
	a.X.FillBytes(out[0:fieldSize])
	a.Y.FillBytes(out[fieldSize : fieldSize*2])
	return out
}
// PointFromBytesUncompressed outputs uncompressed X || Y similar to
// https://www.secg.org/sec1-v1.99.dif.pdf section 2.2 and 2.3
// The input must be exactly twice the curve's field size.
func PointFromBytesUncompressed(curve elliptic.Curve, b []byte) (*EcPoint, error) {
	fieldSize := internal.CalcFieldSize(curve)
	if len(b) != fieldSize*2 {
		return nil, fmt.Errorf("invalid number of bytes")
	}
	x := new(big.Int).SetBytes(b[:fieldSize])
	y := new(big.Int).SetBytes(b[fieldSize:])
	point := &EcPoint{Curve: curve, X: x, Y: y}
	if !point.IsValid() {
		return nil, fmt.Errorf("invalid Point")
	}
	return point, nil
}
// sameCurve determines if points a,b appear to be from the same curve
func sameCurve(a, b *EcPoint) bool {
	// Handle identical pointers and double-nil
	if a == b {
		return true
	}
	// Handle one nil pointer
	if a == nil || b == nil {
		return false
	}
	aParams := a.Curve.Params()
	bParams := b.Curve.Params()
	// Compare the defining parameters by value. The previous version
	// compared the *big.Int fields with ==, which is pointer equality
	// and only held when both points shared the same CurveParams
	// instance, contradicting the stated intent.
	return aParams.P.Cmp(bParams.P) == 0 &&
		aParams.N.Cmp(bParams.N) == 0 &&
		aParams.B.Cmp(bParams.B) == 0 &&
		aParams.BitSize == bParams.BitSize &&
		aParams.Gx.Cmp(bParams.Gx) == 0 &&
		aParams.Gy.Cmp(bParams.Gy) == 0 &&
		aParams.Name == bParams.Name
}

View File

@ -0,0 +1,369 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"bytes"
"crypto/elliptic"
"math/big"
"testing"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/stretchr/testify/require"
"github.com/onsonr/hway/crypto/core"
tt "github.com/onsonr/hway/crypto/internal"
)
// TestIsIdentity checks that the (0, 0) encoding is recognized as the
// point at infinity.
func TestIsIdentity(t *testing.T) {
	// Should be Point at infinity
	identity := &EcPoint{btcec.S256(), core.Zero, core.Zero}
	require.True(t, identity.IsIdentity())
}
// TestNewScalarBaseMultZero checks that multiplying the base point by
// zero succeeds (yielding the identity encoding).
func TestNewScalarBaseMultZero(t *testing.T) {
	// Should be Point at infinity
	curve := btcec.S256()
	num := big.NewInt(0)
	p, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if p == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
	}
}
// TestNewScalarBaseMultOne checks that multiplying the base point by one
// returns the base point itself.
func TestNewScalarBaseMultOne(t *testing.T) {
	// Should be base Point
	curve := btcec.S256()
	num := big.NewInt(1)
	p, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if p == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(p.Bytes(), append(curve.Gx.Bytes(), curve.Gy.Bytes()...)) {
		t.Errorf("NewScalarBaseMult should've returned the base Point.")
	}
}
// TestNewScalarBaseMultNeg checks that a negative scalar is reduced mod
// the group order: (-1)G must equal (N-1)G.
func TestNewScalarBaseMultNeg(t *testing.T) {
	curve := btcec.S256()
	num := big.NewInt(-1)
	p, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if p == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
		t.FailNow()
	}
	// Reduce -1 to N-1 and recompute for comparison.
	num.Mod(num, curve.N)
	e, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if e == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(p.Bytes(), e.Bytes()) {
		t.Errorf("NewScalarBaseMult should've returned the %v, found: %v", e, p)
	}
}
// TestScalarMultZero checks that multiplying a point by zero yields the
// identity point.
func TestScalarMultZero(t *testing.T) {
	// Should be Point at infinity
	curve := btcec.S256()
	p := &EcPoint{
		Curve: curve,
		X: curve.Gx,
		Y: curve.Gy,
	}
	num := big.NewInt(0)
	q, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if q == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !q.IsIdentity() {
		t.Errorf("ScalarMult should've returned the identity Point.")
	}
}
// TestScalarMultOne checks that multiplying the base point by one
// returns the base point unchanged.
func TestScalarMultOne(t *testing.T) {
	// Should be base Point
	curve := btcec.S256()
	p := &EcPoint{
		Curve: curve,
		X: curve.Gx,
		Y: curve.Gy,
	}
	num := big.NewInt(1)
	q, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if q == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(q.Bytes(), append(curve.Gx.Bytes(), curve.Gy.Bytes()...)) {
		t.Errorf("ScalarMult should've returned the base Point.")
	}
}
// TestScalarMultNeg checks that a negative scalar is reduced modulo the
// group order: (-1)P must equal (N-1)P.
func TestScalarMultNeg(t *testing.T) {
	curve := btcec.S256()
	p := &EcPoint{
		Curve: curve,
		X: curve.Gx,
		Y: curve.Gy,
	}
	num := big.NewInt(-1)
	q, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if q == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		// Stop before dereferencing q below; the sibling tests in this
		// file do the same but this one previously fell through.
		t.FailNow()
	}
	// Reduce -1 to N-1 and recompute for comparison.
	num.Mod(num, curve.N)
	e, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if e == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(q.Bytes(), e.Bytes()) {
		// Report the computed point q (the original printed the input p).
		t.Errorf("ScalarMult should've returned the %v, found: %v", e, q)
	}
}
// TestEcPointAddSimple checks that G + G equals 2G.
func TestEcPointAddSimple(t *testing.T) {
	curve := btcec.S256()
	num := big.NewInt(1)
	p1, _ := NewScalarBaseMult(curve, num)
	p2, _ := NewScalarBaseMult(curve, num)
	p3, err := p1.Add(p2)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	num = big.NewInt(2)
	ep, _ := NewScalarBaseMult(curve, num)
	if !bytes.Equal(ep.Bytes(), p3.Bytes()) {
		t.Errorf("EcPoint.Add failed: should equal %v, found: %v", ep, p3)
	}
}
// TestEcPointAddCommunicative checks that point addition is commutative:
// aG + bG == bG + aG for random a, b.
// NOTE(review): "Communicative" is presumably a typo for "Commutative";
// renaming would change the test name, so it is left as-is here.
func TestEcPointAddCommunicative(t *testing.T) {
	curve := btcec.S256()
	a, _ := core.Rand(curve.Params().N)
	b, _ := core.Rand(curve.Params().N)
	p1, _ := NewScalarBaseMult(curve, a)
	p2, _ := NewScalarBaseMult(curve, b)
	p3, err := p1.Add(p2)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	p4, err := p2.Add(p1)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	if !bytes.Equal(p3.Bytes(), p4.Bytes()) {
		t.Errorf("EcPoint.Add Communicative not valid")
	}
}
// TestEcPointAddNeg checks that (-1)G + G yields the identity, which
// this package encodes as 64 zero bytes.
func TestEcPointAddNeg(t *testing.T) {
	curve := btcec.S256()
	num := big.NewInt(-1)
	p1, _ := NewScalarBaseMult(curve, num)
	// Abs mutates num in place, turning -1 into 1.
	num.Abs(num)
	p2, _ := NewScalarBaseMult(curve, num)
	p3, err := p1.Add(p2)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	zero := make([]byte, 64)
	if !bytes.Equal(zero, p3.Bytes()) {
		t.Errorf("Expected value to be zero, found: %v", p3)
	}
}
// TestEcPointBytes round-trips points through Bytes and
// PointFromBytesUncompressed on curves with different field sizes.
func TestEcPointBytes(t *testing.T) {
	curve := btcec.S256()
	point, err := NewScalarBaseMult(curve, big.NewInt(2))
	require.NoError(t, err)
	data := point.Bytes()
	point2, err := PointFromBytesUncompressed(curve, data)
	require.NoError(t, err)
	if point.X.Cmp(point2.X) != 0 && point.Y.Cmp(point2.Y) != 0 {
		t.Errorf("Points are not equal. Expected %v, found %v", point, point2)
	}
	curve2 := elliptic.P224()
	p2, err := NewScalarBaseMult(curve2, big.NewInt(2))
	require.NoError(t, err)
	dta := p2.Bytes()
	point3, err := PointFromBytesUncompressed(curve2, dta)
	require.NoError(t, err)
	if p2.X.Cmp(point3.X) != 0 && p2.Y.Cmp(point3.Y) != 0 {
		t.Errorf("Points are not equal. Expected %v, found %v", p2, point3)
	}
	curve3 := elliptic.P521()
	p3, err := NewScalarBaseMult(curve3, big.NewInt(2))
	require.NoError(t, err)
	data = p3.Bytes()
	point4, err := PointFromBytesUncompressed(curve3, data)
	require.NoError(t, err)
	if p3.X.Cmp(point4.X) != 0 && p3.Y.Cmp(point4.Y) != 0 {
		t.Errorf("Points are not equal. Expected %v, found %v", p3, point4)
	}
}
// TestEcPointBytesDifferentCurves checks that bytes serialized from one
// curve are rejected when deserialized against a different curve.
func TestEcPointBytesDifferentCurves(t *testing.T) {
	k256 := btcec.S256()
	p224 := elliptic.P224()
	p256 := elliptic.P256()
	kp, err := NewScalarBaseMult(k256, big.NewInt(1))
	require.NoError(t, err)
	data := kp.Bytes()
	_, err = PointFromBytesUncompressed(p224, data)
	require.Error(t, err)
	_, err = PointFromBytesUncompressed(p256, data)
	require.Error(t, err)
}
// TestEcPointBytesInvalidNumberBytes checks that every wrong input
// length (shorter or longer than the exact 64 bytes for secp256k1) is
// rejected.
func TestEcPointBytesInvalidNumberBytes(t *testing.T) {
	curve := btcec.S256()
	for i := 1; i < 64; i++ {
		data := make([]byte, i)
		_, err := PointFromBytesUncompressed(curve, data)
		require.Error(t, err)
	}
	for i := 65; i < 128; i++ {
		data := make([]byte, i)
		_, err := PointFromBytesUncompressed(curve, data)
		require.Error(t, err)
	}
}
// TestEcPointMultRandom round-trips a random multiple of the base point
// through Bytes and PointFromBytesUncompressed.
func TestEcPointMultRandom(t *testing.T) {
	curve := btcec.S256()
	r, err := core.Rand(curve.N)
	require.NoError(t, err)
	pt, err := NewScalarBaseMult(curve, r)
	require.NoError(t, err)
	require.NotNil(t, pt)
	data := pt.Bytes()
	pt2, err := PointFromBytesUncompressed(curve, data)
	require.NoError(t, err)
	if pt.X.Cmp(pt2.X) != 0 || pt.Y.Cmp(pt2.Y) != 0 {
		t.Errorf("Points are not equal. Expected: %v, found: %v", pt, pt2)
	}
}
// TestIsBasePoint table-tests IsBasePoint against generator and
// non-generator coordinates on several curves.
func TestIsBasePoint(t *testing.T) {
	k256 := btcec.S256()
	p224 := elliptic.P224()
	p256 := elliptic.P256()
	notG_p224, err := NewScalarBaseMult(p224, tt.B10("9876453120"))
	require.NoError(t, err)
	tests := []struct {
		name string
		curve elliptic.Curve
		x, y *big.Int
		expected bool
	}{
		{"k256-positive", k256, k256.Gx, k256.Gy, true},
		{"p224-positive", p224, p224.Params().Gx, p224.Params().Gy, true},
		{"p256-positive", p256, p256.Params().Gx, p256.Params().Gy, true},
		{"p224-negative", p224, notG_p224.X, notG_p224.Y, false},
		{"p256-negative-wrong-curve", p256, notG_p224.X, notG_p224.Y, false},
		{"k256-negative-doubleGx", k256, k256.Gx, k256.Gx, false},
		{"k256-negative-doubleGy", k256, k256.Gy, k256.Gy, false},
		{"k256-negative-xy-swap", k256, k256.Gy, k256.Gx, false},
		{"k256-negative-oh-oh", k256, core.Zero, core.Zero, false},
	}
	// Run all the tests!
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			actual := EcPoint{test.curve, test.x, test.y}.IsBasePoint()
			require.Equal(t, test.expected, actual)
		})
	}
}
// TestEquals table-tests EcPoint.Equals across equal points, identities,
// mismatched coordinates, mismatched curves, and nil arguments.
func TestEquals(t *testing.T) {
	k256 := btcec.S256()
	p224 := elliptic.P224()
	p256 := elliptic.P256()
	P_p224, _ := NewScalarBaseMult(p224, tt.B10("9876453120"))
	P1_p224, _ := NewScalarBaseMult(p224, tt.B10("9876453120"))
	// Same coordinates as P_p224 but tagged with a different curve.
	P_k256 := &EcPoint{k256, P_p224.X, P_p224.Y}
	id_p224 := &EcPoint{p224, core.Zero, core.Zero}
	id_k256 := &EcPoint{k256, core.Zero, core.Zero}
	id_p256 := &EcPoint{p256, core.Zero, core.Zero}
	tests := []struct {
		name string
		x, y *EcPoint
		expected bool
	}{
		{"p224 same pointer", P_p224, P_p224, true},
		{"p224 same Point", P_p224, P1_p224, true},
		{"p224 identity", id_p224, id_p224, true},
		{"p256 identity", id_p256, id_p256, true},
		{"k256 identity", id_k256, id_k256, true},
		{"negative-same x different y", P_p224, &EcPoint{p224, P_p224.X, core.One}, false},
		{"negative-same y different x", P_p224, &EcPoint{p224, core.Two, P_k256.Y}, false},
		{"negative-wrong curve", P_p224, P_k256, false},
		{"negative-wrong curve reversed", P_k256, P_p224, false},
		{"Point is not the identity", P_p224, id_p224, false},
		{"negative nil", P1_p224, nil, false},
		{"identities on wrong curve", id_p256, id_k256, false},
	}
	// Run all the tests!
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			actual := test.x.Equals(test.y)
			require.Equal(t, test.expected, actual)
		})
	}
}

351
crypto/core/curves/ec_scalar.go Executable file
View File

@ -0,0 +1,351 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/elliptic"
crand "crypto/rand"
"crypto/sha512"
"fmt"
"io"
"math/big"
"filippo.io/edwards25519"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/bwesterb/go-ristretto"
"github.com/onsonr/hway/crypto/core"
"github.com/onsonr/hway/crypto/core/curves/native/bls12381"
"github.com/onsonr/hway/crypto/internal"
)
// EcScalar abstracts modular arithmetic over the scalar field (the group
// order N) of a specific elliptic curve. Implementations reduce results
// modulo their curve's order.
type EcScalar interface {
	// Add returns (x + y) mod N.
	Add(x, y *big.Int) *big.Int
	// Sub returns (x - y) mod N.
	Sub(x, y *big.Int) *big.Int
	// Neg returns (-x) mod N.
	Neg(x *big.Int) *big.Int
	// Mul returns (x * y) mod N.
	Mul(x, y *big.Int) *big.Int
	// Hash maps an arbitrary byte string into the scalar field.
	Hash(input []byte) *big.Int
	// Div returns x * y⁻¹ mod N; behavior for non-invertible y is
	// implementation-specific.
	Div(x, y *big.Int) *big.Int
	// Random returns a freshly generated random scalar.
	Random() (*big.Int, error)
	// IsValid reports whether x is a valid scalar for the curve.
	IsValid(x *big.Int) bool
	Bytes(x *big.Int) []byte // fixed-length byte array
}
// K256Scalar implements EcScalar over the secp256k1 group order.
type K256Scalar struct{}

// Static interface assertion
var _ EcScalar = (*K256Scalar)(nil)

// warning: the Euclidean alg which Mod uses is not constant-time.
func NewK256Scalar() *K256Scalar {
	return &K256Scalar{}
}

// Add returns (x + y) mod N.
func (k K256Scalar) Add(x, y *big.Int) *big.Int {
	v := new(big.Int).Add(x, y)
	v.Mod(v, btcec.S256().N)
	return v
}

// Sub returns (x - y) mod N.
func (k K256Scalar) Sub(x, y *big.Int) *big.Int {
	v := new(big.Int).Sub(x, y)
	v.Mod(v, btcec.S256().N)
	return v
}

// Neg returns (-x) mod N.
func (k K256Scalar) Neg(x *big.Int) *big.Int {
	v := new(big.Int).Sub(btcec.S256().N, x)
	v.Mod(v, btcec.S256().N)
	return v
}

// Mul returns (x * y) mod N.
func (k K256Scalar) Mul(x, y *big.Int) *big.Int {
	v := new(big.Int).Mul(x, y)
	v.Mod(v, btcec.S256().N)
	return v
}

// Div returns x * y⁻¹ mod N. ModInverse returns nil when y has no inverse
// modulo N (y ≡ 0 mod N); guard it so the failure is a descriptive panic
// instead of a nil-pointer dereference inside Mul.
func (k K256Scalar) Div(x, y *big.Int) *big.Int {
	t := new(big.Int).ModInverse(y, btcec.S256().N)
	if t == nil {
		panic("K256Scalar.Div: y is not invertible modulo the curve order")
	}
	return k.Mul(x, t)
}

// Hash maps input into the scalar field via ScalarK256's hasher.
func (k K256Scalar) Hash(input []byte) *big.Int {
	return new(ScalarK256).Hash(input).BigInt()
}

// Random draws 48 random bytes (128 bits beyond the 256-bit order) and
// reduces mod N, making the modular bias negligible.
func (k K256Scalar) Random() (*big.Int, error) {
	b := make([]byte, 48)
	n, err := crand.Read(b)
	if err != nil {
		return nil, err
	}
	if n != 48 {
		return nil, fmt.Errorf("insufficient bytes read")
	}
	v := new(big.Int).SetBytes(b)
	v.Mod(v, btcec.S256().N)
	return v, nil
}

// IsValid reports whether x lies in the valid scalar range for secp256k1.
func (k K256Scalar) IsValid(x *big.Int) bool {
	return core.In(x, btcec.S256().N) == nil
}

// Bytes returns x as a fixed-length, big-endian 32-byte slice.
func (k K256Scalar) Bytes(x *big.Int) []byte {
	bytes := make([]byte, 32)
	x.FillBytes(bytes) // big-endian; will left-pad.
	return bytes
}
// P256Scalar implements EcScalar over the NIST P-256 group order.
type P256Scalar struct{}

// Static interface assertion
var _ EcScalar = (*P256Scalar)(nil)

func NewP256Scalar() *P256Scalar {
	return &P256Scalar{}
}

// Add returns (x + y) mod N.
func (k P256Scalar) Add(x, y *big.Int) *big.Int {
	v := new(big.Int).Add(x, y)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Sub returns (x - y) mod N.
func (k P256Scalar) Sub(x, y *big.Int) *big.Int {
	v := new(big.Int).Sub(x, y)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Neg returns (-x) mod N.
func (k P256Scalar) Neg(x *big.Int) *big.Int {
	v := new(big.Int).Sub(elliptic.P256().Params().N, x)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Mul returns (x * y) mod N.
func (k P256Scalar) Mul(x, y *big.Int) *big.Int {
	v := new(big.Int).Mul(x, y)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Div returns x * y⁻¹ mod N. ModInverse returns nil when y has no inverse
// modulo N (y ≡ 0 mod N); guard it so the failure is a descriptive panic
// instead of a nil-pointer dereference inside Mul.
func (k P256Scalar) Div(x, y *big.Int) *big.Int {
	t := new(big.Int).ModInverse(y, elliptic.P256().Params().N)
	if t == nil {
		panic("P256Scalar.Div: y is not invertible modulo the curve order")
	}
	return k.Mul(x, t)
}

// Hash maps input into the scalar field via ScalarP256's hasher.
func (k P256Scalar) Hash(input []byte) *big.Int {
	return new(ScalarP256).Hash(input).BigInt()
}

// Random draws 48 random bytes (128 bits beyond the 256-bit order) and
// reduces mod N, making the modular bias negligible.
func (k P256Scalar) Random() (*big.Int, error) {
	b := make([]byte, 48)
	n, err := crand.Read(b)
	if err != nil {
		return nil, err
	}
	if n != 48 {
		return nil, fmt.Errorf("insufficient bytes read")
	}
	v := new(big.Int).SetBytes(b)
	v.Mod(v, elliptic.P256().Params().N)
	return v, nil
}

// IsValid reports whether x lies in the valid scalar range for P-256.
func (k P256Scalar) IsValid(x *big.Int) bool {
	return core.In(x, elliptic.P256().Params().N) == nil
}

// Bytes returns x as a fixed-length, big-endian 32-byte slice.
func (k P256Scalar) Bytes(x *big.Int) []byte {
	bytes := make([]byte, 32)
	x.FillBytes(bytes) // big-endian; will left-pad.
	return bytes
}
// Bls12381Scalar implements EcScalar over the BLS12-381 scalar field Fq.
type Bls12381Scalar struct{}

// Static interface assertion
var _ EcScalar = (*Bls12381Scalar)(nil)

func NewBls12381Scalar() *Bls12381Scalar {
	return &Bls12381Scalar{}
}

// Add returns (x + y) in the scalar field.
func (k Bls12381Scalar) Add(x, y *big.Int) *big.Int {
	lhs := bls12381.Bls12381FqNew().SetBigInt(x)
	rhs := bls12381.Bls12381FqNew().SetBigInt(y)
	lhs.Add(lhs, rhs)
	return lhs.BigInt()
}

// Sub returns (x - y) in the scalar field.
func (k Bls12381Scalar) Sub(x, y *big.Int) *big.Int {
	lhs := bls12381.Bls12381FqNew().SetBigInt(x)
	rhs := bls12381.Bls12381FqNew().SetBigInt(y)
	lhs.Sub(lhs, rhs)
	return lhs.BigInt()
}

// Neg returns (-x) in the scalar field.
func (k Bls12381Scalar) Neg(x *big.Int) *big.Int {
	v := bls12381.Bls12381FqNew().SetBigInt(x)
	v.Neg(v)
	return v.BigInt()
}

// Mul returns (x * y) in the scalar field.
func (k Bls12381Scalar) Mul(x, y *big.Int) *big.Int {
	lhs := bls12381.Bls12381FqNew().SetBigInt(x)
	rhs := bls12381.Bls12381FqNew().SetBigInt(y)
	lhs.Mul(lhs, rhs)
	return lhs.BigInt()
}

// Div returns x * y⁻¹ via a constant-time select: when y was not
// invertible, the quotient is discarded and x is returned unchanged.
func (k Bls12381Scalar) Div(x, y *big.Int) *big.Int {
	quot := bls12381.Bls12381FqNew()
	lhs := bls12381.Bls12381FqNew().SetBigInt(x)
	rhs := bls12381.Bls12381FqNew().SetBigInt(y)
	_, wasInverted := quot.Invert(rhs)
	quot.Mul(lhs, quot)
	selector := 0
	if wasInverted {
		selector = 1
	}
	return lhs.CMove(lhs, quot, selector).BigInt()
}

// Hash maps input into the scalar field via ScalarBls12381's hasher.
func (k Bls12381Scalar) Hash(input []byte) *big.Int {
	return new(ScalarBls12381).Hash(input).BigInt()
}

// Random returns a random scalar drawn from crypto/rand's Reader.
func (k Bls12381Scalar) Random() (*big.Int, error) {
	sc := BLS12381G1().NewScalar().Random(crand.Reader)
	if sc == nil {
		return nil, fmt.Errorf("invalid random value")
	}
	return sc.BigInt(), nil
}

// Bytes returns x as a fixed-length, big-endian 32-byte slice.
func (k Bls12381Scalar) Bytes(x *big.Int) []byte {
	out := make([]byte, 32)
	x.FillBytes(out) // big-endian; will left-pad.
	return out
}

// IsValid reports whether x survives a round-trip through the field
// representation, i.e. x is already in canonical reduced form.
func (k Bls12381Scalar) IsValid(x *big.Int) bool {
	return bls12381.Bls12381FqNew().SetBigInt(x).BigInt().Cmp(x) == 0
}
// ed25519N is the prime order of the ed25519 base-point subgroup.
// taken from https://datatracker.ietf.org/doc/html/rfc8032
var ed25519N, _ = new(big.Int).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16)

// Ed25519Scalar implements EcScalar arithmetic modulo the ed25519 group order.
type Ed25519Scalar struct{}

// Static interface assertion
var _ EcScalar = (*Ed25519Scalar)(nil)

// NewEd25519Scalar returns a new Ed25519Scalar.
func NewEd25519Scalar() *Ed25519Scalar {
	return &Ed25519Scalar{}
}
func (k Ed25519Scalar) Add(x, y *big.Int) *big.Int {
a, err := internal.BigInt2Ed25519Scalar(x)
if err != nil {
panic(err)
}
b, err := internal.BigInt2Ed25519Scalar(y)
if err != nil {
panic(err)
}
a.Add(a, b)
return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}
func (k Ed25519Scalar) Sub(x, y *big.Int) *big.Int {
a, err := internal.BigInt2Ed25519Scalar(x)
if err != nil {
panic(err)
}
b, err := internal.BigInt2Ed25519Scalar(y)
if err != nil {
panic(err)
}
a.Subtract(a, b)
return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}
func (k Ed25519Scalar) Neg(x *big.Int) *big.Int {
a, err := internal.BigInt2Ed25519Scalar(x)
if err != nil {
panic(err)
}
a.Negate(a)
return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}
func (k Ed25519Scalar) Mul(x, y *big.Int) *big.Int {
a, err := internal.BigInt2Ed25519Scalar(x)
if err != nil {
panic(err)
}
b, err := internal.BigInt2Ed25519Scalar(y)
if err != nil {
panic(err)
}
a.Multiply(a, b)
return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}
func (k Ed25519Scalar) Div(x, y *big.Int) *big.Int {
b, err := internal.BigInt2Ed25519Scalar(y)
if err != nil {
panic(err)
}
b.Invert(b)
a, err := internal.BigInt2Ed25519Scalar(x)
if err != nil {
panic(err)
}
a.Multiply(a, b)
return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}
// Hash derives a scalar from input using go-ristretto's Derive and returns
// it as a big-endian big.Int.
func (k Ed25519Scalar) Hash(input []byte) *big.Int {
	v := new(ristretto.Scalar).Derive(input)
	var data [32]byte
	v.BytesInto(&data)
	// ristretto emits little-endian bytes; reverse for big.Int (big-endian).
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(data[:]))
}
// Bytes returns x as a 32-byte big-endian slice. It panics when x is not a
// canonical ed25519 scalar (BigInt2Ed25519Scalar returns an error).
func (k Ed25519Scalar) Bytes(x *big.Int) []byte {
	a, err := internal.BigInt2Ed25519Scalar(x)
	if err != nil {
		panic(err)
	}
	// edwards25519 serializes little-endian; reverse to big-endian.
	return internal.ReverseScalarBytes(a.Bytes())
}
// Random returns a random scalar drawn from crypto/rand's Reader.
func (k Ed25519Scalar) Random() (*big.Int, error) {
	return k.RandomWithReader(crand.Reader)
}
// RandomWithReader derives a random scalar from 64 bytes of r: the bytes
// are hashed with SHA-512, the first 32 digest bytes are clamped per the
// ed25519 convention, and the result is returned big-endian.
func (k Ed25519Scalar) RandomWithReader(r io.Reader) (*big.Int, error) {
	b := make([]byte, 64)
	// io.ReadFull retries short reads; a bare r.Read may legally return
	// fewer than 64 bytes with a nil error, which the old n != 64 check
	// turned into a spurious failure.
	if _, err := io.ReadFull(r, b); err != nil {
		return nil, err
	}
	digest := sha512.Sum512(b)
	var hBytes [32]byte
	copy(hBytes[:], digest[:])
	s, err := edwards25519.NewScalar().SetBytesWithClamping(hBytes[:])
	if err != nil {
		return nil, err
	}
	// edwards25519 serializes little-endian; reverse for big.Int.
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(s.Bytes())), nil
}
// IsValid reports whether x is a canonical ed25519 scalar, i.e. lies in
// [0, ed25519N). The previous upper-bound-only check admitted negative
// values; the other EcScalar implementations reject them via core.In.
func (k Ed25519Scalar) IsValid(x *big.Int) bool {
	return x.Sign() >= 0 && x.Cmp(ed25519N) == -1
}

38
crypto/core/curves/ecdsa.go Executable file
View File

@ -0,0 +1,38 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/ecdsa"
"math/big"
)
// EcdsaVerify runs a curve- or algorithm-specific ECDSA verification function on input
// an ECDSA public (verification) key, a message digest, and an ECDSA signature.
// It must return true if all the parameters are sane and the ECDSA signature is valid,
// and false otherwise.
type EcdsaVerify func(pubKey *EcPoint, hash []byte, signature *EcdsaSignature) bool
// EcdsaSignature represents a (composite) digital signature.
type EcdsaSignature struct {
	// V is a recovery identifier; it is not used by VerifyEcdsa.
	V int
	// R, S are the two components of an ECDSA signature.
	R, S *big.Int
}
// Static type assertion
var _ EcdsaVerify = VerifyEcdsa

// VerifyEcdsa verifies an ECDSA signature over the given message digest
// using the standard library's ecdsa.Verify. Per the EcdsaVerify contract it
// returns false — rather than panicking — when any required input is nil.
func VerifyEcdsa(pk *EcPoint, hash []byte, sig *EcdsaSignature) bool {
	if pk == nil || sig == nil || sig.R == nil || sig.S == nil {
		return false
	}
	return ecdsa.Verify(
		&ecdsa.PublicKey{
			Curve: pk.Curve,
			X:     pk.X,
			Y:     pk.Y,
		},
		hash, sig.R, sig.S)
}

Some files were not shown because too many files have changed in this diff Show More