refactor: simplify goreleaser configuration and remove unused build targets

Prad Nukala 2025-01-04 19:07:31 -05:00
parent 9c6c28854c
commit 022d9d3097
30 changed files with 896 additions and 2209 deletions


@@ -11,170 +11,16 @@ builds:
goarch:
- wasm
- id: sonr
main: ./cmd/sonrd
binary: sonrd
mod_timestamp: "{{ .CommitTimestamp }}"
goos:
- linux
- darwin
goarch:
- amd64
- arm64
goamd64:
- v1
flags:
- -mod=readonly
- -trimpath
ldflags:
- -X github.com/cosmos/cosmos-sdk/version.Name=sonr
- -X github.com/cosmos/cosmos-sdk/version.AppName=sonrd
- -X github.com/cosmos/cosmos-sdk/version.Version={{.Version}}
- -X github.com/cosmos/cosmos-sdk/version.Commit={{.Commit}}
- -X "github.com/cosmos/cosmos-sdk/version.BuildTags=netgo,ledger"
tags:
- netgo
- ledger
- id: hway
main: ./cmd/hway
binary: hway
goos:
- linux
- darwin
goarch:
- amd64
- arm64
mod_timestamp: "{{ .CommitTimestamp }}"
flags:
- -mod=readonly
- -trimpath
goamd64:
- v1
tags:
- netgo
ldflags:
- -X main.version={{.Version}}
- -X main.commit={{.Commit}}
- -X main.date={{.Date}}
archives:
- id: sonr
builds: [sonr]
name_template: >-
sonr_{{ .Os }}_{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
format: tar.gz
files:
- src: README*
wrap_in_directory: true
- id: hway
builds: [hway]
name_template: >-
hway_{{ .Os }}_{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
format: tar.gz
files:
- src: README*
wrap_in_directory: true
nfpms:
- id: hway
package_name: hway
file_name_template: "hway_{{ .Os }}_{{ .Arch }}{{ .ConventionalExtension }}"
builds: [hway]
vendor: Sonr
homepage: "https://onsonr.dev"
maintainer: "Sonr <support@onsonr.dev>"
description: "Sonr Highway is a decentralized, permissionless, and censorship-resistant identity network proxy."
license: "Apache 2.0"
formats:
- rpm
- deb
- apk
dependencies:
- ipfs
contents:
- src: README*
dst: /usr/share/doc/hway
bindir: /usr/bin
section: net
priority: optional
# Add these lines to match build config
- id: sonr
package_name: sonrd
file_name_template: "sonrd_{{ .Os }}_{{ .Arch }}{{ .ConventionalExtension }}"
builds: [sonr]
vendor: Sonr
homepage: "https://onsonr.dev"
maintainer: "Sonr <support@onsonr.dev>"
description: "Sonr is a decentralized, permissionless, and censorship-resistant identity network."
license: "Apache 2.0"
formats:
- rpm
- deb
- apk
dependencies:
- ipfs
contents:
- src: README*
dst: /usr/share/doc/sonrd
bindir: /usr/bin
section: net
priority: optional
# Add these lines to match build config
brews:
- name: hway
ids: [hway]
commit_author:
name: goreleaserbot
email: bot@goreleaser.com
directory: Formula
caveats: "Run a local hway node and access it with the hway proxy"
homepage: "https://onsonr.dev"
description: "Sonr is a decentralized, permissionless, and censorship-resistant identity network."
dependencies:
- name: ipfs
repository:
owner: onsonr
name: homebrew-tap
branch: master
token: "{{ .Env.GITHUB_PERSONAL_AUTH_TOKEN }}"
- name: sonr
ids: [sonr]
commit_author:
name: goreleaserbot
email: bot@goreleaser.com
directory: Formula
caveats: "Run a local sonr node and access it with the hway proxy"
homepage: "https://onsonr.dev"
description: "Sonr is a decentralized, permissionless, and censorship-resistant identity network."
dependencies:
- name: ipfs
repository:
owner: onsonr
name: homebrew-tap
branch: master
token: "{{ .Env.GITHUB_PERSONAL_AUTH_TOKEN }}"
release:
github:
owner: onsonr
name: sonr
name_template: 'Release {{ .Env.RELEASE_DATE }}'
name: motr
name_template: '{{ .Tag }} | {{ .Env.RELEASE_DATE }}'
draft: false
replace_existing_draft: true
replace_existing_artifacts: true
extra_files:
- glob: ./README*
- glob: ./scripts/install.sh
- glob: ./scripts/test_node.sh
- glob: ./scripts/test_ics_node.sh
announce:
telegram:

.taskfiles/Go.yml (new file, 13 lines)

@@ -0,0 +1,13 @@
# https://taskfile.dev
version: '3'
vars:
GREETING: Hello, World!
tasks:
build:
desc: Build the project
cmds:
- echo "{{.GREETING}}"
silent: true


@@ -5,8 +5,10 @@ version: '3'
vars:
GREETING: Hello, World!
includes:
es: ./ES.yml
go: ./Go.yml
tasks:
default:
cmds:
- echo "{{.GREETING}}"
silent: true
cmd: "task -l"


@@ -6,7 +6,9 @@ vars:
GREETING: Hello, World!
tasks:
default:
install:
desc: Install the pnpm dependencies
dir: web/es-client
cmds:
- echo "{{.GREETING}}"
- pnpm install
silent: true

Makefile (268 lines changed)

@@ -1,114 +1,20 @@
#!/usr/bin/make -f
PACKAGES_SIMTEST=$(shell go list ./... | grep '/simulation')
VERSION := $(shell echo $(shell git describe --tags) | sed 's/^v//')
COMMIT := $(shell git log -1 --format='%H')
LEDGER_ENABLED ?= true
SDK_PACK := $(shell go list -m github.com/cosmos/cosmos-sdk | sed 's/ /\@/g')
BINDIR ?= $(GOPATH)/bin
SIMAPP = ./app
PC_PORT_NUM=42069
PC_LOG_FILE=./sonr.log
PC_SOCKET_PATH=/tmp/sonr-net.sock
# for dockerized protobuf tools
DOCKER := $(shell which docker)
HTTPS_GIT := github.com/onsonr/sonr.git
PROCESS_COMPOSE := $(shell which process-compose)
export GO111MODULE = on
# process build tags
build_tags = netgo
ifeq ($(LEDGER_ENABLED),true)
ifeq ($(OS),Windows_NT)
GCCEXE = $(shell where gcc.exe 2> NUL)
ifeq ($(GCCEXE),)
$(error gcc.exe not installed for ledger support, please install or set LEDGER_ENABLED=false)
else
build_tags += ledger
endif
else
UNAME_S = $(shell uname -s)
ifeq ($(UNAME_S),OpenBSD)
$(warning OpenBSD detected, disabling ledger support (https://github.com/cosmos/cosmos-sdk/issues/1988))
else
GCC = $(shell command -v gcc 2> /dev/null)
ifeq ($(GCC),)
$(error gcc not installed for ledger support, please install or set LEDGER_ENABLED=false)
else
build_tags += ledger
endif
endif
endif
endif
ifeq ($(WITH_CLEVELDB),yes)
build_tags += gcc
endif
build_tags += $(BUILD_TAGS)
build_tags := $(strip $(build_tags))
whitespace :=
empty = $(whitespace) $(whitespace)
comma := ,
build_tags_comma_sep := $(subst $(empty),$(comma),$(build_tags))
# process linker flags
ldflags = -X github.com/cosmos/cosmos-sdk/version.Name=sonr \
-X github.com/cosmos/cosmos-sdk/version.AppName=sonrd \
-X github.com/cosmos/cosmos-sdk/version.Version=$(VERSION) \
-X github.com/cosmos/cosmos-sdk/version.Commit=$(COMMIT) \
-X "github.com/cosmos/cosmos-sdk/version.BuildTags=$(build_tags_comma_sep)"
ifeq ($(WITH_CLEVELDB),yes)
ldflags += -X github.com/cosmos/cosmos-sdk/types.DBBackend=cleveldb
endif
ifeq ($(LINK_STATICALLY),true)
ldflags += -linkmode=external -extldflags "-Wl,-z,muldefs -static"
endif
ldflags += $(LDFLAGS)
ldflags := $(strip $(ldflags))
BUILD_FLAGS := -tags "$(build_tags_comma_sep)" -ldflags '$(ldflags)' -trimpath
# The below include contains the tools and runsim targets.
all: install lint test
all: test
build: go.sum
ifeq ($(OS),Windows_NT)
$(error wasmd server not supported. Use "make build-windows-client" for client)
exit 1
else
go build -mod=readonly $(BUILD_FLAGS) -o build/sonrd ./cmd/sonrd
endif
build-motr: go.sum
GOOS=js GOARCH=wasm go build -o static/wasm/app.wasm ./cmd/motr/main.go
build-hway: go.sum
go build -o build/hway ./cmd/hway
build-windows-client: go.sum
GOOS=windows GOARCH=amd64 go build -mod=readonly $(BUILD_FLAGS) -o build/sonrd.exe ./cmd/sonrd
build-contract-tests-hooks:
ifeq ($(OS),Windows_NT)
go build -mod=readonly $(BUILD_FLAGS) -o build/contract_tests.exe ./cmd/contract_tests
else
go build -mod=readonly $(BUILD_FLAGS) -o build/contract_tests ./cmd/contract_tests
endif
install: go.sum
go install -mod=readonly $(BUILD_FLAGS) ./cmd/sonrd
install-hway: go.sum
go install -mod=readonly ./cmd/hway
########################################
### Tools & dependencies
########################################
go-mod-cache: go.sum
@echo "--> Download go modules to local cache"
@@ -123,23 +29,24 @@ draw-deps:
go install github.com/RobotsAndPencils/goviz@latest
@goviz -i ./cmd/sonrd -d 2 | dot -Tpng -o dependency-graph.png
install-deps:
echo "Installing dependencies"
@go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
@go install github.com/a-h/templ/cmd/templ@latest
@go install github.com/go-task/task/v3/cmd/task@latest
clean:
rm -rf .aider*
rm -rf static
rm -rf .out
rm -rf hway.db
rm -rf snapcraft-local.yaml build/
rm -rf build
distclean: clean
rm -rf vendor/
init-env:
@echo "Installing process-compose"
sh scripts/init_env.sh
########################################
### Testing
########################################
test: test-unit
test-all: test-race test-cover test-system
@@ -156,160 +63,6 @@ test-cover:
benchmark:
@go test -mod=readonly -bench=. ./...
test-sim-import-export: runsim
@echo "Running application import/export simulation. This may take several minutes..."
@$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(SIMAPP) -ExitOnFail 50 5 TestAppImportExport
test-sim-multi-seed-short: runsim
@echo "Running short multi-seed application simulation. This may take awhile!"
@$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(SIMAPP) -ExitOnFail 50 5 TestFullAppSimulation
test-sim-deterministic: runsim
@echo "Running application deterministic simulation. This may take awhile!"
@$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(SIMAPP) -ExitOnFail 1 1 TestAppStateDeterminism
test-system: install
$(MAKE) -C tests/system/ test
###############################################################################
### Linting ###
###############################################################################
format-tools:
go install mvdan.cc/gofumpt@v0.4.0
go install github.com/client9/misspell/cmd/misspell@v0.3.4
go install github.com/daixiang0/gci@v0.11.2
lint: format-tools
golangci-lint run --tests=false
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "*_test.go" | xargs gofumpt -d
format: format-tools
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs gofumpt -w
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs misspell -w
find . -name '*.go' -type f -not -path "./vendor*" -not -path "./tests/system/vendor*" -not -path "*.git*" -not -path "./client/lcd/statik/statik.go" | xargs gci write --skip-generated -s standard -s default -s "prefix(cosmossdk.io)" -s "prefix(github.com/cosmos/cosmos-sdk)" -s "prefix(github.com/CosmWasm/wasmd)" --custom-order
mod-tidy:
go mod tidy
.PHONY: format-tools lint format mod-tidy
###############################################################################
### Protobuf ###
###############################################################################
protoVer=0.15.1
protoImageName=ghcr.io/cosmos/proto-builder:$(protoVer)
protoImage=$(DOCKER) run --rm -v $(CURDIR):/workspace --workdir /workspace $(protoImageName)
proto-gen:
@echo "Generating Protobuf files"
@go install cosmossdk.io/orm/cmd/protoc-gen-go-cosmos-orm@latest
@$(protoImage) sh ./scripts/protocgen.sh
spawn stub-gen
proto-format:
@echo "Formatting Protobuf files"
@$(protoImage) find ./ -name "*.proto" -exec clang-format -i {} \;
proto-lint:
@$(protoImage) buf lint --error-format=json
proto-check-breaking:
@$(protoImage) buf breaking --against $(HTTPS_GIT)#branch=master
.PHONY: all install install-debug \
go-mod-cache draw-deps clean build format \
test test-all test-build test-cover test-unit test-race \
test-sim-import-export build-windows-client \
test-system
## --- Testnet Utilities ---
get-localic:
@echo "Installing local-interchain"
git clone --branch v8.7.0 https://github.com/strangelove-ventures/interchaintest.git interchaintest-downloader
cd interchaintest-downloader/local-interchain && make install
@echo ✅ local-interchain installed $(shell which local-ic)
is-localic-installed:
ifeq (,$(shell which local-ic))
make get-localic
endif
get-heighliner:
git clone https://github.com/strangelove-ventures/heighliner.git
cd heighliner && go install
local-image:
ifeq (,$(shell which heighliner))
echo 'heighliner' binary not found. Consider running `make get-heighliner`
else
heighliner build -c sonrd --local -f chains.yaml
endif
.PHONY: get-heighliner local-image is-localic-installed
###############################################################################
### e2e ###
###############################################################################
ictest-basic:
@echo "Running basic interchain tests"
@cd interchaintest && go test -race -v -run TestBasicChain .
ictest-ibc:
@echo "Running IBC interchain tests"
@cd interchaintest && go test -race -v -run TestIBC .
ictest-wasm:
@echo "Running cosmwasm interchain tests"
@cd interchaintest && go test -race -v -run TestCosmWasmIntegration .
ictest-packetforward:
@echo "Running packet forward middleware interchain tests"
@cd interchaintest && go test -race -v -run TestPacketForwardMiddleware .
ictest-poa:
@echo "Running proof of authority interchain tests"
@cd interchaintest && go test -race -v -run TestPOA .
ictest-tokenfactory:
@echo "Running token factory interchain tests"
@cd interchaintest && go test -race -v -run TestTokenFactory .
###############################################################################
### testnet ###
###############################################################################
setup-ipfs:
./scripts/ipfs_config.sh
setup-testnet: mod-tidy is-localic-installed install local-image set-testnet-configs setup-testnet-keys
# Run this before testnet keys are added
# chainid-1 is used in the testnet.json
set-testnet-configs:
sonrd config set client chain-id sonr-testnet-1
sonrd config set client keyring-backend test
sonrd config set client output text
# import keys from testnet.json into test keyring
setup-testnet-keys:
-`echo "decorate bright ozone fork gallery riot bus exhaust worth way bone indoor calm squirrel merry zero scheme cotton until shop any excess stage laundry" | sonrd keys add acc0 --recover`
-`echo "wealth flavor believe regret funny network recall kiss grape useless pepper cram hint member few certain unveil rather brick bargain curious require crowd raise" | sonrd keys add acc1 --recover`
# default testnet is with IBC
testnet: setup-testnet
spawn local-ic start ibc-testnet
testnet-basic: setup-testnet
spawn local-ic start testnet
sh-testnet: mod-tidy
CHAIN_ID="sonr-testnet-1" BLOCK_TIME="1000ms" CLEAN=true sh scripts/test_node.sh
.PHONY: setup-testnet set-testnet-configs testnet testnet-basic sh-testnet dop-testnet
###############################################################################
### help ###
###############################################################################
@@ -318,8 +71,6 @@ help:
@echo "Usage: make <target>"
@echo ""
@echo "Available targets:"
@echo " install : Install the binary"
@echo " local-image : Install the docker image"
@echo " proto-gen : Generate code from proto files"
@echo " testnet : Local devnet with IBC"
@echo " sh-testnet : Shell local devnet"
@@ -327,3 +78,4 @@ help:
@echo " ictest-ibc : IBC end-to-end test"
.PHONY: help

embed/codec.go (new file, 48 lines)

@@ -0,0 +1,48 @@
package embed
import (
"encoding/json"
"github.com/ipfs/boxo/files"
"github.com/onsonr/sonr/internal/config/motr"
)
const SchemaVersion = 1
const (
AppManifestFileName = "app.webmanifest"
DWNConfigFileName = "dwn.json"
IndexHTMLFileName = "index.html"
MainJSFileName = "main.js"
ServiceWorkerFileName = "sw.js"
)
// NewVaultFS creates a new in-memory vault directory populated with the default files
func NewVaultFS(cfg *motr.Config) (files.Directory, error) {
manifestBz, err := NewWebManifest()
if err != nil {
return nil, err
}
cnfBz, err := json.Marshal(cfg)
if err != nil {
return nil, err
}
return files.NewMapDirectory(map[string]files.Node{
AppManifestFileName: files.NewBytesFile(manifestBz),
DWNConfigFileName: files.NewBytesFile(cnfBz),
IndexHTMLFileName: files.NewBytesFile(IndexHTML),
MainJSFileName: files.NewBytesFile(MainJS),
ServiceWorkerFileName: files.NewBytesFile(WorkerJS),
}), nil
}
// NewVaultConfig returns the default vault config
func NewVaultConfig(addr string, ucanCID string) *motr.Config {
return &motr.Config{
MotrToken: ucanCID,
MotrAddress: addr,
IpfsGatewayUrl: "http://localhost:80",
SonrApiUrl: "http://localhost:1317",
SonrRpcUrl: "http://localhost:26657",
SonrChainId: "sonr-testnet-1",
}
}
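
To make the flow above concrete, here is a minimal, hypothetical usage sketch (not part of the commit) that builds the default config, assembles the vault directory, and lists its entries. The import path github.com/onsonr/sonr/embed, the address, and the UCAN CID are assumptions for illustration only.

```go
package main

import (
	"fmt"
	"log"

	"github.com/onsonr/sonr/embed" // assumed import path for the package above
)

func main() {
	// Default config pointed at a local node; the address and UCAN CID are
	// placeholder values for illustration only.
	cfg := embed.NewVaultConfig("idx1placeholderaddress", "placeholder-ucan-cid")

	// Assemble the in-memory vault directory (app.webmanifest, dwn.json,
	// index.html, main.js, sw.js) that would be published to IPFS.
	dir, err := embed.NewVaultFS(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// List the entries to confirm the expected files are present.
	it := dir.Entries()
	for it.Next() {
		fmt.Println("vault file:", it.Name())
	}
	if err := it.Err(); err != nil {
		log.Fatal(err)
	}
}
```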

embed/index.html (new file, 138 lines)

@@ -0,0 +1,138 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Sonr DWN</title>
<!-- HTMX -->
<script src="https://unpkg.com/htmx.org@1.9.10"></script>
<!-- WASM Support -->
<script src="https://cdn.jsdelivr.net/gh/golang/go@go1.22.5/misc/wasm/wasm_exec.js"></script>
<!-- Main JS -->
<script src="main.js"></script>
<!-- Tailwind (assuming you're using it based on your classes) -->
<script src="https://cdn.tailwindcss.com"></script>
<!-- Add manifest for PWA support -->
<link
rel="manifest"
href="/app.webmanifest"
crossorigin="use-credentials"
/>
<!-- Offline detection styles -->
<style>
.offline-indicator {
display: none;
}
body.offline .offline-indicator {
display: block;
background: #f44336;
color: white;
text-align: center;
padding: 0.5rem;
position: fixed;
top: 0;
left: 0;
right: 0;
z-index: 1000;
}
</style>
</head>
<body
class="flex items-center justify-center h-full bg-zinc-50 lg:p-24 md:16 p-4"
>
<!-- Offline indicator -->
<div class="offline-indicator">
You are currently offline. Some features may be limited.
</div>
<!-- Loading indicator -->
<div
id="loading-indicator"
class="fixed top-0 left-0 w-full h-1 bg-blue-200 transition-all duration-300"
style="display: none"
>
<div class="h-full bg-blue-600 w-0 transition-all duration-300"></div>
</div>
<main
class="flex-row items-center justify-center mx-auto w-fit max-w-screen-sm gap-y-3"
>
<div
id="content"
hx-get="/#"
hx-trigger="load"
hx-swap="outerHTML"
hx-indicator="#loading-indicator"
>
Loading...
</div>
</main>
<!-- WASM Ready Indicator (hidden) -->
<div
id="wasm-status"
class="hidden fixed bottom-4 right-4 p-2 rounded-md bg-green-500 text-white"
hx-swap-oob="true"
>
WASM Ready
</div>
<script>
// Initialize service worker
if ("serviceWorker" in navigator) {
window.addEventListener("load", async function () {
try {
const registration =
await navigator.serviceWorker.register("/sw.js");
console.log(
"Service Worker registered with scope:",
registration.scope,
);
} catch (error) {
console.error("Service Worker registration failed:", error);
}
});
}
// HTMX loading indicator
htmx.on("htmx:beforeRequest", function (evt) {
document.getElementById("loading-indicator").style.display = "block";
});
htmx.on("htmx:afterRequest", function (evt) {
document.getElementById("loading-indicator").style.display = "none";
});
// WASM ready event handler
document.addEventListener("wasm-ready", function () {
const status = document.getElementById("wasm-status");
status.classList.remove("hidden");
setTimeout(() => {
status.classList.add("hidden");
}, 3000);
});
// Offline status handler
window.addEventListener("offline", function () {
document.body.classList.add("offline");
});
window.addEventListener("online", function () {
document.body.classList.remove("offline");
});
// Initial offline check
if (!navigator.onLine) {
document.body.classList.add("offline");
}
</script>
</body>
</html>

embed/main.js (new file, 152 lines)

@@ -0,0 +1,152 @@
// MessageChannel for WASM communication
let wasmChannel;
let wasmPort;
async function initWasmChannel() {
wasmChannel = new MessageChannel();
wasmPort = wasmChannel.port1;
// Setup message handling from WASM
wasmPort.onmessage = (event) => {
const { type, data } = event.data;
switch (type) {
case 'WASM_READY':
console.log('WASM is ready');
document.dispatchEvent(new CustomEvent('wasm-ready'));
break;
case 'RESPONSE':
handleWasmResponse(data);
break;
case 'SYNC_COMPLETE':
handleSyncComplete(data);
break;
}
};
}
// Initialize WebAssembly and Service Worker
async function init() {
try {
// Register service worker
if ('serviceWorker' in navigator) {
const registration = await navigator.serviceWorker.register('./sw.js');
console.log('ServiceWorker registered');
// Wait for the service worker to be ready
await navigator.serviceWorker.ready;
// Initialize MessageChannel
await initWasmChannel();
// Send the MessageChannel port to the service worker
navigator.serviceWorker.controller.postMessage({
type: 'PORT_INITIALIZATION',
port: wasmChannel.port2
}, [wasmChannel.port2]);
// Register for periodic sync if available
if ('periodicSync' in registration) {
try {
await registration.periodicSync.register('wasm-sync', {
minInterval: 24 * 60 * 60 * 1000 // 24 hours
});
} catch (error) {
console.log('Periodic sync could not be registered:', error);
}
}
}
// Initialize HTMX with custom config
htmx.config.withCredentials = true;
htmx.config.wsReconnectDelay = 'full-jitter';
// htmx exposes request hooks as events rather than config callbacks,
// so tag every request with an ID via htmx:configRequest
document.body.addEventListener('htmx:configRequest', (evt) => {
evt.detail.headers['X-Wasm-Request-ID'] = 'req_' + Date.now();
});
// Offline requests are intercepted and queued by the service worker's
// fetch handler, so no special handling is needed here
// Log HTMX errors
document.body.addEventListener('htmx:responseError', (evt) => {
console.error('HTMX Error:', evt.detail);
});
} catch (error) {
console.error('Initialization failed:', error);
}
}
function handleWasmResponse(data) {
const { requestId, response } = data;
// Process the WASM response
// This might update the UI or trigger HTMX swaps
const targetElement = document.querySelector(`[data-request-id="${requestId}"]`);
if (targetElement) {
htmx.process(targetElement);
}
}
function handleSyncComplete(data) {
const { url } = data;
// Handle successful sync
// Maybe refresh the relevant part of the UI
htmx.trigger('body', 'sync:complete', { url });
}
// Handle offline status changes
window.addEventListener('online', () => {
document.body.classList.remove('offline');
// Trigger sync when back online
if (wasmPort) {
wasmPort.postMessage({ type: 'SYNC_REQUEST' });
}
});
window.addEventListener('offline', () => {
document.body.classList.add('offline');
});
// Custom event handlers for HTMX
document.addEventListener('htmx:beforeRequest', (event) => {
const { elt, requestConfig } = event.detail;
// Track the request ID that was attached in htmx:configRequest
const requestId = requestConfig.headers['X-Wasm-Request-ID'];
if (requestId) {
elt.setAttribute('data-request-id', requestId);
}
});
document.addEventListener('htmx:afterRequest', (event) => {
const { elt, successful } = event.detail;
if (successful) {
elt.removeAttribute('data-request-id');
}
});
// Initialize everything when the page loads
document.addEventListener('DOMContentLoaded', init);
// Export functions that might be needed by WASM
window.wasmBridge = {
triggerUIUpdate: function (selector, content) {
const target = document.querySelector(selector);
if (target) {
// Insert the HTML fragment, then let htmx process any new attributes
target.insertAdjacentHTML('beforeend', content);
htmx.process(target);
}
},
showNotification: function (message, type = 'info') {
// Implement notification system
console.log(`${type}: ${message}`);
}
};

embed/sw.js (new file, 258 lines)

@@ -0,0 +1,258 @@
// Cache names for different types of resources
const CACHE_NAMES = {
wasm: 'wasm-cache-v1',
static: 'static-cache-v1',
dynamic: 'dynamic-cache-v1'
};
// Import required scripts
importScripts(
"https://cdn.jsdelivr.net/gh/golang/go@go1.22.5/misc/wasm/wasm_exec.js",
"https://cdn.jsdelivr.net/gh/nlepage/go-wasm-http-server@v1.1.0/sw.js",
);
// Initialize WASM HTTP listener
const wasmInstance = registerWasmHTTPListener("https://cdn.sonr.id/wasm/app.wasm");
// MessageChannel port for WASM communication
let wasmPort;
// Request queue for offline operations
let requestQueue = new Map();
// Setup message channel handler
self.addEventListener('message', async (event) => {
if (event.data.type === 'PORT_INITIALIZATION') {
wasmPort = event.data.port;
setupWasmCommunication();
}
});
function setupWasmCommunication() {
wasmPort.onmessage = async (event) => {
const { type, data } = event.data;
switch (type) {
case 'WASM_REQUEST':
handleWasmRequest(data);
break;
case 'SYNC_REQUEST':
processSyncQueue();
break;
}
};
// Notify that WASM is ready
wasmPort.postMessage({ type: 'WASM_READY' });
}
// Enhanced install event
self.addEventListener("install", (event) => {
event.waitUntil(
Promise.all([
skipWaiting(),
// Cache WASM binary and essential resources
caches.open(CACHE_NAMES.wasm).then(cache =>
cache.addAll([
'https://cdn.sonr.id/wasm/app.wasm',
'https://cdn.jsdelivr.net/gh/golang/go@go1.22.5/misc/wasm/wasm_exec.js'
])
)
])
);
});
// Enhanced activate event
self.addEventListener("activate", (event) => {
event.waitUntil(
Promise.all([
clients.claim(),
// Clean up old caches
caches.keys().then(keys =>
Promise.all(
keys.map(key => {
if (!Object.values(CACHE_NAMES).includes(key)) {
return caches.delete(key);
}
})
)
)
])
);
});
// Intercept fetch events
self.addEventListener('fetch', (event) => {
const request = event.request;
// Handle API requests differently from static resources
if (request.url.includes('/api/')) {
event.respondWith(handleApiRequest(request));
} else {
event.respondWith(handleStaticRequest(request));
}
});
async function handleApiRequest(request) {
try {
// Try to make the request
const response = await fetch(request.clone());
// If successful, pass through WASM handler
if (response.ok) {
return await processWasmResponse(request, response);
}
// If offline or failed, queue the request
await queueRequest(request);
// Return cached response if available
const cachedResponse = await caches.match(request);
if (cachedResponse) {
return cachedResponse;
}
// Return offline response
return new Response(
JSON.stringify({ error: 'Currently offline' }),
{
status: 503,
headers: { 'Content-Type': 'application/json' }
}
);
} catch (error) {
await queueRequest(request);
return new Response(
JSON.stringify({ error: 'Request failed' }),
{
status: 500,
headers: { 'Content-Type': 'application/json' }
}
);
}
}
async function handleStaticRequest(request) {
// Check cache first
const cachedResponse = await caches.match(request);
if (cachedResponse) {
return cachedResponse;
}
try {
const response = await fetch(request);
// Cache successful responses
if (response.ok) {
const cache = await caches.open(CACHE_NAMES.static);
cache.put(request, response.clone());
}
return response;
} catch (error) {
// Return offline page for navigation requests
if (request.mode === 'navigate') {
return caches.match('/offline.html');
}
throw error;
}
}
async function processWasmResponse(request, response) {
// Clone the response before processing
const responseClone = response.clone();
try {
// Process through WASM
const processedResponse = await wasmInstance.processResponse(responseClone);
// Notify client through message channel
if (wasmPort) {
wasmPort.postMessage({
type: 'RESPONSE',
requestId: request.headers.get('X-Wasm-Request-ID'),
response: processedResponse
});
}
return processedResponse;
} catch (error) {
console.error('WASM processing error:', error);
return response;
}
}
async function queueRequest(request) {
const serializedRequest = await serializeRequest(request);
requestQueue.set(request.url, serializedRequest);
// Register for background sync
try {
await self.registration.sync.register('wasm-sync');
} catch (error) {
console.error('Sync registration failed:', error);
}
}
async function serializeRequest(request) {
const headers = {};
for (const [key, value] of request.headers.entries()) {
headers[key] = value;
}
return {
url: request.url,
method: request.method,
headers,
body: await request.text(),
timestamp: Date.now()
};
}
// Handle background sync
self.addEventListener('sync', (event) => {
if (event.tag === 'wasm-sync') {
event.waitUntil(processSyncQueue());
}
});
async function processSyncQueue() {
const requests = Array.from(requestQueue.values());
for (const serializedRequest of requests) {
try {
const response = await fetch(new Request(serializedRequest.url, {
method: serializedRequest.method,
headers: serializedRequest.headers,
body: serializedRequest.body
}));
if (response.ok) {
requestQueue.delete(serializedRequest.url);
// Notify client of successful sync
if (wasmPort) {
wasmPort.postMessage({
type: 'SYNC_COMPLETE',
url: serializedRequest.url
});
}
}
} catch (error) {
console.error('Sync failed for request:', error);
}
}
}
// Handle payment requests
self.addEventListener("canmakepayment", function (e) {
e.respondWith(Promise.resolve(true));
});
// Handle periodic sync if available
self.addEventListener('periodicsync', (event) => {
if (event.tag === 'wasm-sync') {
event.waitUntil(processSyncQueue());
}
});

embed/utils.go (new file, 47 lines)

@@ -0,0 +1,47 @@
package embed
import (
_ "embed"
"reflect"
"strings"
)
//go:embed index.html
var IndexHTML []byte
//go:embed main.js
var MainJS []byte
//go:embed sw.js
var WorkerJS []byte
func getSchema(structType interface{}) string {
t := reflect.TypeOf(structType)
if t.Kind() == reflect.Ptr {
t = t.Elem()
}
if t.Kind() != reflect.Struct {
return ""
}
var fields []string
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
fieldName := toCamelCase(field.Name)
fields = append(fields, fieldName)
}
// Add "++" at the beginning, separated by a comma
return "++, " + strings.Join(fields, ", ")
}
func toCamelCase(s string) string {
if s == "" {
return s
}
if len(s) == 1 {
return strings.ToLower(s)
}
return strings.ToLower(s[:1]) + s[1:]
}
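
As a quick illustration of the schema string these helpers produce, here is a hedged, test-style sketch (not part of the commit) that would sit alongside utils.go in package embed; the exampleRecord struct is hypothetical.

```go
package embed

import "testing"

// Hypothetical struct used only to illustrate the schema-string format.
type exampleRecord struct {
	ID      string
	Handle  string
	Address string
}

func TestGetSchemaFormat(t *testing.T) {
	// getSchema lower-cases only the first letter of each field name
	// (so "ID" becomes "iD") and prefixes the comma-separated list with "++".
	got := getSchema(exampleRecord{})
	want := "++, iD, handle, address"
	if got != want {
		t.Fatalf("getSchema() = %q, want %q", got, want)
	}
}
```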

embed/webworker.go (new file, 124 lines)

@@ -0,0 +1,124 @@
package embed
import "encoding/json"
func NewWebManifest() ([]byte, error) {
return json.Marshal(baseWebManifest)
}
var baseWebManifest = WebManifest{
Name: "Sonr Vault",
ShortName: "Sonr.ID",
StartURL: "/index.html",
Display: "standalone",
DisplayOverride: []string{
"fullscreen",
"minimal-ui",
},
Icons: []IconDefinition{
{
Src: "/icons/icon-192x192.png",
Sizes: "192x192",
Type: "image/png",
},
},
ServiceWorker: ServiceWorker{
Scope: "/",
Src: "/sw.js",
UseCache: true,
},
ProtocolHandlers: []ProtocolHandler{
{
Scheme: "did.sonr",
URL: "/resolve/sonr/%s",
},
{
Scheme: "did.eth",
URL: "/resolve/eth/%s",
},
{
Scheme: "did.btc",
URL: "/resolve/btc/%s",
},
{
Scheme: "did.usdc",
URL: "/resolve/usdc/%s",
},
{
Scheme: "did.ipfs",
URL: "/resolve/ipfs/%s",
},
},
}
type WebManifest struct {
// Required fields
Name string `json:"name"` // Full name of the application
ShortName string `json:"short_name"` // Short version of the name
// Display and appearance
Description string `json:"description,omitempty"` // Purpose and features of the application
Display string `json:"display,omitempty"` // Preferred display mode: fullscreen, standalone, minimal-ui, browser
DisplayOverride []string `json:"display_override,omitempty"`
ThemeColor string `json:"theme_color,omitempty"` // Default theme color for the application
BackgroundColor string `json:"background_color,omitempty"` // Background color during launch
Orientation string `json:"orientation,omitempty"` // Default orientation: any, natural, landscape, portrait
// URLs and scope
StartURL string `json:"start_url"` // Starting URL when launching
Scope string `json:"scope,omitempty"` // Navigation scope of the web application
ServiceWorker ServiceWorker `json:"service_worker,omitempty"`
// Icons
Icons []IconDefinition `json:"icons,omitempty"`
// Optional features
RelatedApplications []RelatedApplication `json:"related_applications,omitempty"`
PreferRelatedApplications bool `json:"prefer_related_applications,omitempty"`
Shortcuts []Shortcut `json:"shortcuts,omitempty"`
// Experimental features (uncomment if needed)
FileHandlers []FileHandler `json:"file_handlers,omitempty"`
ProtocolHandlers []ProtocolHandler `json:"protocol_handlers,omitempty"`
}
type FileHandler struct {
Action string `json:"action"`
Accept map[string][]string `json:"accept"`
}
type LaunchHandler struct {
Action string `json:"action"`
}
type IconDefinition struct {
Src string `json:"src"`
Sizes string `json:"sizes"`
Type string `json:"type,omitempty"`
Purpose string `json:"purpose,omitempty"`
}
type ProtocolHandler struct {
Scheme string `json:"scheme"`
URL string `json:"url"`
}
type RelatedApplication struct {
Platform string `json:"platform"`
URL string `json:"url,omitempty"`
ID string `json:"id,omitempty"`
}
type Shortcut struct {
Name string `json:"name"`
ShortName string `json:"short_name,omitempty"`
Description string `json:"description,omitempty"`
URL string `json:"url"`
Icons []IconDefinition `json:"icons,omitempty"`
}
type ServiceWorker struct {
Scope string `json:"scope"`
Src string `json:"src"`
UseCache bool `json:"use_cache"`
}


@@ -1,32 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package hwayorm
import (
"context"
"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgconn"
)
type DBTX interface {
Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
Query(context.Context, string, ...interface{}) (pgx.Rows, error)
QueryRow(context.Context, string, ...interface{}) pgx.Row
}
func New(db DBTX) *Queries {
return &Queries{db: db}
}
type Queries struct {
db DBTX
}
func (q *Queries) WithTx(tx pgx.Tx) *Queries {
return &Queries{
db: tx,
}
}
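
Since this generated pgx-backed package is removed in this commit, here is a rough sketch, for context only, of how such a DBTX is typically satisfied. The import path, connection string, and handle value are assumptions; *pgxpool.Pool is used because its Exec/Query/QueryRow signatures match the interface above.

```go
package main

import (
	"context"
	"log"

	"github.com/jackc/pgx/v5/pgxpool"

	"github.com/onsonr/sonr/internal/database/hwayorm" // assumed import path for the removed package
)

func main() {
	ctx := context.Background()

	// *pgxpool.Pool provides Exec, Query, and QueryRow with the signatures
	// the DBTX interface expects, so it can back the generated Queries.
	pool, err := pgxpool.New(ctx, "postgres://localhost:5432/hway?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer pool.Close()

	q := hwayorm.New(pool)
	exists, err := q.CheckHandleExists(ctx, "alice")
	if err != nil {
		log.Fatal(err)
	}
	log.Println("handle taken:", exists)
}
```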


@@ -1,68 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package hwayorm
import (
"github.com/jackc/pgx/v5/pgtype"
)
type Credential struct {
ID string `json:"id"`
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Handle string `json:"handle"`
CredentialID string `json:"credential_id"`
AuthenticatorAttachment string `json:"authenticator_attachment"`
Origin string `json:"origin"`
Type string `json:"type"`
Transports string `json:"transports"`
}
type Profile struct {
ID string `json:"id"`
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Address string `json:"address"`
Handle string `json:"handle"`
Origin string `json:"origin"`
Name string `json:"name"`
Status string `json:"status"`
}
type Session struct {
ID string `json:"id"`
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
BrowserName string `json:"browser_name"`
BrowserVersion string `json:"browser_version"`
ClientIpaddr string `json:"client_ipaddr"`
Platform string `json:"platform"`
IsDesktop bool `json:"is_desktop"`
IsMobile bool `json:"is_mobile"`
IsTablet bool `json:"is_tablet"`
IsTv bool `json:"is_tv"`
IsBot bool `json:"is_bot"`
Challenge string `json:"challenge"`
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
ProfileID string `json:"profile_id"`
}
type Vault struct {
ID int64 `json:"id"`
CreatedAt pgtype.Timestamptz `json:"created_at"`
UpdatedAt pgtype.Timestamptz `json:"updated_at"`
DeletedAt pgtype.Timestamptz `json:"deleted_at"`
Handle string `json:"handle"`
Origin string `json:"origin"`
Address string `json:"address"`
Cid string `json:"cid"`
Config []byte `json:"config"`
SessionID int64 `json:"session_id"`
RedirectUri string `json:"redirect_uri"`
}


@@ -1,34 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package hwayorm
import (
"context"
)
type Querier interface {
CheckHandleExists(ctx context.Context, handle string) (bool, error)
CreateSession(ctx context.Context, arg CreateSessionParams) (*Session, error)
GetChallengeBySessionID(ctx context.Context, id string) (string, error)
GetCredentialByID(ctx context.Context, credentialID string) (*Credential, error)
GetCredentialsByHandle(ctx context.Context, handle string) ([]*Credential, error)
GetHumanVerificationNumbers(ctx context.Context, id string) (*GetHumanVerificationNumbersRow, error)
GetProfileByAddress(ctx context.Context, address string) (*Profile, error)
GetProfileByHandle(ctx context.Context, handle string) (*Profile, error)
GetProfileByID(ctx context.Context, id string) (*Profile, error)
GetSessionByClientIP(ctx context.Context, clientIpaddr string) (*Session, error)
GetSessionByID(ctx context.Context, id string) (*Session, error)
GetVaultConfigByCID(ctx context.Context, cid string) (*Vault, error)
GetVaultRedirectURIBySessionID(ctx context.Context, sessionID int64) (string, error)
InsertCredential(ctx context.Context, arg InsertCredentialParams) (*Credential, error)
InsertProfile(ctx context.Context, arg InsertProfileParams) (*Profile, error)
SoftDeleteCredential(ctx context.Context, credentialID string) error
SoftDeleteProfile(ctx context.Context, address string) error
UpdateProfile(ctx context.Context, arg UpdateProfileParams) (*Profile, error)
UpdateSessionHumanVerification(ctx context.Context, arg UpdateSessionHumanVerificationParams) (*Session, error)
UpdateSessionWithProfileID(ctx context.Context, arg UpdateSessionWithProfileIDParams) (*Session, error)
}
var _ Querier = (*Queries)(nil)


@@ -1,583 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
// source: query.sql
package hwayorm
import (
"context"
)
const checkHandleExists = `-- name: CheckHandleExists :one
SELECT COUNT(*) > 0 as handle_exists FROM profiles
WHERE handle = $1
AND deleted_at IS NULL
`
func (q *Queries) CheckHandleExists(ctx context.Context, handle string) (bool, error) {
row := q.db.QueryRow(ctx, checkHandleExists, handle)
var handle_exists bool
err := row.Scan(&handle_exists)
return handle_exists, err
}
const createSession = `-- name: CreateSession :one
INSERT INTO sessions (
id,
browser_name,
browser_version,
client_ipaddr,
platform,
is_desktop,
is_mobile,
is_tablet,
is_tv,
is_bot,
challenge,
is_human_first,
is_human_last,
profile_id
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
RETURNING id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id
`
type CreateSessionParams struct {
ID string `json:"id"`
BrowserName string `json:"browser_name"`
BrowserVersion string `json:"browser_version"`
ClientIpaddr string `json:"client_ipaddr"`
Platform string `json:"platform"`
IsDesktop bool `json:"is_desktop"`
IsMobile bool `json:"is_mobile"`
IsTablet bool `json:"is_tablet"`
IsTv bool `json:"is_tv"`
IsBot bool `json:"is_bot"`
Challenge string `json:"challenge"`
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
ProfileID string `json:"profile_id"`
}
func (q *Queries) CreateSession(ctx context.Context, arg CreateSessionParams) (*Session, error) {
row := q.db.QueryRow(ctx, createSession,
arg.ID,
arg.BrowserName,
arg.BrowserVersion,
arg.ClientIpaddr,
arg.Platform,
arg.IsDesktop,
arg.IsMobile,
arg.IsTablet,
arg.IsTv,
arg.IsBot,
arg.Challenge,
arg.IsHumanFirst,
arg.IsHumanLast,
arg.ProfileID,
)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return &i, err
}
const getChallengeBySessionID = `-- name: GetChallengeBySessionID :one
SELECT challenge FROM sessions
WHERE id = $1 AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetChallengeBySessionID(ctx context.Context, id string) (string, error) {
row := q.db.QueryRow(ctx, getChallengeBySessionID, id)
var challenge string
err := row.Scan(&challenge)
return challenge, err
}
const getCredentialByID = `-- name: GetCredentialByID :one
SELECT id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports FROM credentials
WHERE credential_id = $1
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetCredentialByID(ctx context.Context, credentialID string) (*Credential, error) {
row := q.db.QueryRow(ctx, getCredentialByID, credentialID)
var i Credential
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.CredentialID,
&i.AuthenticatorAttachment,
&i.Origin,
&i.Type,
&i.Transports,
)
return &i, err
}
const getCredentialsByHandle = `-- name: GetCredentialsByHandle :many
SELECT id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports FROM credentials
WHERE handle = $1
AND deleted_at IS NULL
`
func (q *Queries) GetCredentialsByHandle(ctx context.Context, handle string) ([]*Credential, error) {
rows, err := q.db.Query(ctx, getCredentialsByHandle, handle)
if err != nil {
return nil, err
}
defer rows.Close()
var items []*Credential
for rows.Next() {
var i Credential
if err := rows.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.CredentialID,
&i.AuthenticatorAttachment,
&i.Origin,
&i.Type,
&i.Transports,
); err != nil {
return nil, err
}
items = append(items, &i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const getHumanVerificationNumbers = `-- name: GetHumanVerificationNumbers :one
SELECT is_human_first, is_human_last FROM sessions
WHERE id = $1 AND deleted_at IS NULL
LIMIT 1
`
type GetHumanVerificationNumbersRow struct {
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
}
func (q *Queries) GetHumanVerificationNumbers(ctx context.Context, id string) (*GetHumanVerificationNumbersRow, error) {
row := q.db.QueryRow(ctx, getHumanVerificationNumbers, id)
var i GetHumanVerificationNumbersRow
err := row.Scan(&i.IsHumanFirst, &i.IsHumanLast)
return &i, err
}
const getProfileByAddress = `-- name: GetProfileByAddress :one
SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name, status FROM profiles
WHERE address = $1 AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetProfileByAddress(ctx context.Context, address string) (*Profile, error) {
row := q.db.QueryRow(ctx, getProfileByAddress, address)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
&i.Status,
)
return &i, err
}
const getProfileByHandle = `-- name: GetProfileByHandle :one
SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name, status FROM profiles
WHERE handle = $1
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetProfileByHandle(ctx context.Context, handle string) (*Profile, error) {
row := q.db.QueryRow(ctx, getProfileByHandle, handle)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
&i.Status,
)
return &i, err
}
const getProfileByID = `-- name: GetProfileByID :one
SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name, status FROM profiles
WHERE id = $1 AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetProfileByID(ctx context.Context, id string) (*Profile, error) {
row := q.db.QueryRow(ctx, getProfileByID, id)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
&i.Status,
)
return &i, err
}
const getSessionByClientIP = `-- name: GetSessionByClientIP :one
SELECT id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id FROM sessions
WHERE client_ipaddr = $1 AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetSessionByClientIP(ctx context.Context, clientIpaddr string) (*Session, error) {
row := q.db.QueryRow(ctx, getSessionByClientIP, clientIpaddr)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return &i, err
}
const getSessionByID = `-- name: GetSessionByID :one
SELECT id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id FROM sessions
WHERE id = $1 AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetSessionByID(ctx context.Context, id string) (*Session, error) {
row := q.db.QueryRow(ctx, getSessionByID, id)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return &i, err
}
const getVaultConfigByCID = `-- name: GetVaultConfigByCID :one
SELECT id, created_at, updated_at, deleted_at, handle, origin, address, cid, config, session_id, redirect_uri FROM vaults
WHERE cid = $1
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetVaultConfigByCID(ctx context.Context, cid string) (*Vault, error) {
row := q.db.QueryRow(ctx, getVaultConfigByCID, cid)
var i Vault
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.Origin,
&i.Address,
&i.Cid,
&i.Config,
&i.SessionID,
&i.RedirectUri,
)
return &i, err
}
const getVaultRedirectURIBySessionID = `-- name: GetVaultRedirectURIBySessionID :one
SELECT redirect_uri FROM vaults
WHERE session_id = $1
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetVaultRedirectURIBySessionID(ctx context.Context, sessionID int64) (string, error) {
row := q.db.QueryRow(ctx, getVaultRedirectURIBySessionID, sessionID)
var redirect_uri string
err := row.Scan(&redirect_uri)
return redirect_uri, err
}
const insertCredential = `-- name: InsertCredential :one
INSERT INTO credentials (
handle,
credential_id,
origin,
type,
transports
) VALUES ($1, $2, $3, $4, $5)
RETURNING id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports
`
type InsertCredentialParams struct {
Handle string `json:"handle"`
CredentialID string `json:"credential_id"`
Origin string `json:"origin"`
Type string `json:"type"`
Transports string `json:"transports"`
}
func (q *Queries) InsertCredential(ctx context.Context, arg InsertCredentialParams) (*Credential, error) {
row := q.db.QueryRow(ctx, insertCredential,
arg.Handle,
arg.CredentialID,
arg.Origin,
arg.Type,
arg.Transports,
)
var i Credential
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.CredentialID,
&i.AuthenticatorAttachment,
&i.Origin,
&i.Type,
&i.Transports,
)
return &i, err
}
const insertProfile = `-- name: InsertProfile :one
INSERT INTO profiles (
address,
handle,
origin,
name
) VALUES ($1, $2, $3, $4)
RETURNING id, created_at, updated_at, deleted_at, address, handle, origin, name, status
`
type InsertProfileParams struct {
Address string `json:"address"`
Handle string `json:"handle"`
Origin string `json:"origin"`
Name string `json:"name"`
}
func (q *Queries) InsertProfile(ctx context.Context, arg InsertProfileParams) (*Profile, error) {
row := q.db.QueryRow(ctx, insertProfile,
arg.Address,
arg.Handle,
arg.Origin,
arg.Name,
)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
&i.Status,
)
return &i, err
}
const softDeleteCredential = `-- name: SoftDeleteCredential :exec
UPDATE credentials
SET deleted_at = CURRENT_TIMESTAMP
WHERE credential_id = $1
`
func (q *Queries) SoftDeleteCredential(ctx context.Context, credentialID string) error {
_, err := q.db.Exec(ctx, softDeleteCredential, credentialID)
return err
}
const softDeleteProfile = `-- name: SoftDeleteProfile :exec
UPDATE profiles
SET deleted_at = CURRENT_TIMESTAMP
WHERE address = $1
`
func (q *Queries) SoftDeleteProfile(ctx context.Context, address string) error {
_, err := q.db.Exec(ctx, softDeleteProfile, address)
return err
}
const updateProfile = `-- name: UpdateProfile :one
UPDATE profiles
SET
name = $1,
handle = $2,
updated_at = CURRENT_TIMESTAMP
WHERE address = $3
AND deleted_at IS NULL
RETURNING id, created_at, updated_at, deleted_at, address, handle, origin, name, status
`
type UpdateProfileParams struct {
Name string `json:"name"`
Handle string `json:"handle"`
Address string `json:"address"`
}
func (q *Queries) UpdateProfile(ctx context.Context, arg UpdateProfileParams) (*Profile, error) {
row := q.db.QueryRow(ctx, updateProfile, arg.Name, arg.Handle, arg.Address)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
&i.Status,
)
return &i, err
}
const updateSessionHumanVerification = `-- name: UpdateSessionHumanVerification :one
UPDATE sessions
SET
is_human_first = $1,
is_human_last = $2,
updated_at = CURRENT_TIMESTAMP
WHERE id = $3
RETURNING id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id
`
type UpdateSessionHumanVerificationParams struct {
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
ID string `json:"id"`
}
func (q *Queries) UpdateSessionHumanVerification(ctx context.Context, arg UpdateSessionHumanVerificationParams) (*Session, error) {
row := q.db.QueryRow(ctx, updateSessionHumanVerification, arg.IsHumanFirst, arg.IsHumanLast, arg.ID)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return &i, err
}
const updateSessionWithProfileID = `-- name: UpdateSessionWithProfileID :one
UPDATE sessions
SET
profile_id = $1,
updated_at = CURRENT_TIMESTAMP
WHERE id = $2
RETURNING id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id
`
type UpdateSessionWithProfileIDParams struct {
ProfileID string `json:"profile_id"`
ID string `json:"id"`
}
func (q *Queries) UpdateSessionWithProfileID(ctx context.Context, arg UpdateSessionWithProfileIDParams) (*Session, error) {
row := q.db.QueryRow(ctx, updateSessionWithProfileID, arg.ProfileID, arg.ID)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return &i, err
}


@@ -1,31 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package motrorm
import (
"context"
"database/sql"
)
type DBTX interface {
ExecContext(context.Context, string, ...interface{}) (sql.Result, error)
PrepareContext(context.Context, string) (*sql.Stmt, error)
QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
QueryRowContext(context.Context, string, ...interface{}) *sql.Row
}
func New(db DBTX) *Queries {
return &Queries{db: db}
}
type Queries struct {
db DBTX
}
func (q *Queries) WithTx(tx *sql.Tx) *Queries {
return &Queries{
db: tx,
}
}


@@ -1,99 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package motrorm
import (
"database/sql"
"time"
)
type Account struct {
ID string `json:"id"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
DeletedAt sql.NullTime `json:"deleted_at"`
Number int64 `json:"number"`
Sequence int64 `json:"sequence"`
Address string `json:"address"`
PublicKey string `json:"public_key"`
ChainID string `json:"chain_id"`
Controller string `json:"controller"`
IsSubsidiary bool `json:"is_subsidiary"`
IsValidator bool `json:"is_validator"`
IsDelegator bool `json:"is_delegator"`
IsAccountable bool `json:"is_accountable"`
}
type Asset struct {
ID string `json:"id"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
DeletedAt sql.NullTime `json:"deleted_at"`
Name string `json:"name"`
Symbol string `json:"symbol"`
Decimals int64 `json:"decimals"`
ChainID string `json:"chain_id"`
Channel string `json:"channel"`
AssetType string `json:"asset_type"`
CoingeckoID sql.NullString `json:"coingecko_id"`
}
type Credential struct {
ID string `json:"id"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
DeletedAt sql.NullTime `json:"deleted_at"`
Handle string `json:"handle"`
CredentialID string `json:"credential_id"`
AuthenticatorAttachment string `json:"authenticator_attachment"`
Origin string `json:"origin"`
Type string `json:"type"`
Transports string `json:"transports"`
}
type Profile struct {
ID string `json:"id"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
DeletedAt sql.NullTime `json:"deleted_at"`
Address string `json:"address"`
Handle string `json:"handle"`
Origin string `json:"origin"`
Name string `json:"name"`
}
type Session struct {
ID string `json:"id"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
DeletedAt sql.NullTime `json:"deleted_at"`
BrowserName string `json:"browser_name"`
BrowserVersion string `json:"browser_version"`
ClientIpaddr string `json:"client_ipaddr"`
Platform string `json:"platform"`
IsDesktop bool `json:"is_desktop"`
IsMobile bool `json:"is_mobile"`
IsTablet bool `json:"is_tablet"`
IsTv bool `json:"is_tv"`
IsBot bool `json:"is_bot"`
Challenge string `json:"challenge"`
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
ProfileID int64 `json:"profile_id"`
}
type Vault struct {
ID string `json:"id"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
DeletedAt sql.NullTime `json:"deleted_at"`
Handle string `json:"handle"`
Origin string `json:"origin"`
Address string `json:"address"`
Cid string `json:"cid"`
Config string `json:"config"`
SessionID string `json:"session_id"`
RedirectUri string `json:"redirect_uri"`
}


@@ -1,34 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
package motrorm
import (
"context"
)
type Querier interface {
CheckHandleExists(ctx context.Context, handle string) (bool, error)
CreateSession(ctx context.Context, arg CreateSessionParams) (Session, error)
GetChallengeBySessionID(ctx context.Context, id string) (string, error)
GetCredentialByID(ctx context.Context, credentialID string) (Credential, error)
GetCredentialsByHandle(ctx context.Context, handle string) ([]Credential, error)
GetHumanVerificationNumbers(ctx context.Context, id string) (GetHumanVerificationNumbersRow, error)
GetProfileByAddress(ctx context.Context, address string) (Profile, error)
GetProfileByHandle(ctx context.Context, handle string) (Profile, error)
GetProfileByID(ctx context.Context, id string) (Profile, error)
GetSessionByClientIP(ctx context.Context, clientIpaddr string) (Session, error)
GetSessionByID(ctx context.Context, id string) (Session, error)
GetVaultConfigByCID(ctx context.Context, cid string) (Vault, error)
GetVaultRedirectURIBySessionID(ctx context.Context, sessionID string) (string, error)
InsertCredential(ctx context.Context, arg InsertCredentialParams) (Credential, error)
InsertProfile(ctx context.Context, arg InsertProfileParams) (Profile, error)
SoftDeleteCredential(ctx context.Context, credentialID string) error
SoftDeleteProfile(ctx context.Context, address string) error
UpdateProfile(ctx context.Context, arg UpdateProfileParams) (Profile, error)
UpdateSessionHumanVerification(ctx context.Context, arg UpdateSessionHumanVerificationParams) (Session, error)
UpdateSessionWithProfileID(ctx context.Context, arg UpdateSessionWithProfileIDParams) (Session, error)
}
var _ Querier = (*Queries)(nil)


@@ -1,581 +0,0 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.27.0
// source: query.sql
package motrorm
import (
"context"
)
const checkHandleExists = `-- name: CheckHandleExists :one
SELECT COUNT(*) > 0 as handle_exists FROM profiles
WHERE handle = ?
AND deleted_at IS NULL
`
func (q *Queries) CheckHandleExists(ctx context.Context, handle string) (bool, error) {
row := q.db.QueryRowContext(ctx, checkHandleExists, handle)
var handle_exists bool
err := row.Scan(&handle_exists)
return handle_exists, err
}
const createSession = `-- name: CreateSession :one
INSERT INTO sessions (
id,
browser_name,
browser_version,
client_ipaddr,
platform,
is_desktop,
is_mobile,
is_tablet,
is_tv,
is_bot,
challenge,
is_human_first,
is_human_last,
profile_id
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? )
RETURNING id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id
`
type CreateSessionParams struct {
ID string `json:"id"`
BrowserName string `json:"browser_name"`
BrowserVersion string `json:"browser_version"`
ClientIpaddr string `json:"client_ipaddr"`
Platform string `json:"platform"`
IsDesktop bool `json:"is_desktop"`
IsMobile bool `json:"is_mobile"`
IsTablet bool `json:"is_tablet"`
IsTv bool `json:"is_tv"`
IsBot bool `json:"is_bot"`
Challenge string `json:"challenge"`
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
ProfileID int64 `json:"profile_id"`
}
func (q *Queries) CreateSession(ctx context.Context, arg CreateSessionParams) (Session, error) {
row := q.db.QueryRowContext(ctx, createSession,
arg.ID,
arg.BrowserName,
arg.BrowserVersion,
arg.ClientIpaddr,
arg.Platform,
arg.IsDesktop,
arg.IsMobile,
arg.IsTablet,
arg.IsTv,
arg.IsBot,
arg.Challenge,
arg.IsHumanFirst,
arg.IsHumanLast,
arg.ProfileID,
)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return i, err
}
const getChallengeBySessionID = `-- name: GetChallengeBySessionID :one
SELECT challenge FROM sessions
WHERE id = ? AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetChallengeBySessionID(ctx context.Context, id string) (string, error) {
row := q.db.QueryRowContext(ctx, getChallengeBySessionID, id)
var challenge string
err := row.Scan(&challenge)
return challenge, err
}
const getCredentialByID = `-- name: GetCredentialByID :one
SELECT id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports FROM credentials
WHERE credential_id = ?
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetCredentialByID(ctx context.Context, credentialID string) (Credential, error) {
row := q.db.QueryRowContext(ctx, getCredentialByID, credentialID)
var i Credential
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.CredentialID,
&i.AuthenticatorAttachment,
&i.Origin,
&i.Type,
&i.Transports,
)
return i, err
}
const getCredentialsByHandle = `-- name: GetCredentialsByHandle :many
SELECT id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports FROM credentials
WHERE handle = ?
AND deleted_at IS NULL
`
func (q *Queries) GetCredentialsByHandle(ctx context.Context, handle string) ([]Credential, error) {
rows, err := q.db.QueryContext(ctx, getCredentialsByHandle, handle)
if err != nil {
return nil, err
}
defer rows.Close()
var items []Credential
for rows.Next() {
var i Credential
if err := rows.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.CredentialID,
&i.AuthenticatorAttachment,
&i.Origin,
&i.Type,
&i.Transports,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const getHumanVerificationNumbers = `-- name: GetHumanVerificationNumbers :one
SELECT is_human_first, is_human_last FROM sessions
WHERE id = ? AND deleted_at IS NULL
LIMIT 1
`
type GetHumanVerificationNumbersRow struct {
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
}
func (q *Queries) GetHumanVerificationNumbers(ctx context.Context, id string) (GetHumanVerificationNumbersRow, error) {
row := q.db.QueryRowContext(ctx, getHumanVerificationNumbers, id)
var i GetHumanVerificationNumbersRow
err := row.Scan(&i.IsHumanFirst, &i.IsHumanLast)
return i, err
}
const getProfileByAddress = `-- name: GetProfileByAddress :one
SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name FROM profiles
WHERE address = ? AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetProfileByAddress(ctx context.Context, address string) (Profile, error) {
row := q.db.QueryRowContext(ctx, getProfileByAddress, address)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
)
return i, err
}
const getProfileByHandle = `-- name: GetProfileByHandle :one
SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name FROM profiles
WHERE handle = ?
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetProfileByHandle(ctx context.Context, handle string) (Profile, error) {
row := q.db.QueryRowContext(ctx, getProfileByHandle, handle)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
)
return i, err
}
const getProfileByID = `-- name: GetProfileByID :one
SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name FROM profiles
WHERE id = ? AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetProfileByID(ctx context.Context, id string) (Profile, error) {
row := q.db.QueryRowContext(ctx, getProfileByID, id)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
)
return i, err
}
const getSessionByClientIP = `-- name: GetSessionByClientIP :one
SELECT id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id FROM sessions
WHERE client_ipaddr = ? AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetSessionByClientIP(ctx context.Context, clientIpaddr string) (Session, error) {
row := q.db.QueryRowContext(ctx, getSessionByClientIP, clientIpaddr)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return i, err
}
const getSessionByID = `-- name: GetSessionByID :one
SELECT id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id FROM sessions
WHERE id = ? AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetSessionByID(ctx context.Context, id string) (Session, error) {
row := q.db.QueryRowContext(ctx, getSessionByID, id)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return i, err
}
const getVaultConfigByCID = `-- name: GetVaultConfigByCID :one
SELECT id, created_at, updated_at, deleted_at, handle, origin, address, cid, config, session_id, redirect_uri FROM vaults
WHERE cid = ?
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetVaultConfigByCID(ctx context.Context, cid string) (Vault, error) {
row := q.db.QueryRowContext(ctx, getVaultConfigByCID, cid)
var i Vault
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.Origin,
&i.Address,
&i.Cid,
&i.Config,
&i.SessionID,
&i.RedirectUri,
)
return i, err
}
const getVaultRedirectURIBySessionID = `-- name: GetVaultRedirectURIBySessionID :one
SELECT redirect_uri FROM vaults
WHERE session_id = ?
AND deleted_at IS NULL
LIMIT 1
`
func (q *Queries) GetVaultRedirectURIBySessionID(ctx context.Context, sessionID string) (string, error) {
row := q.db.QueryRowContext(ctx, getVaultRedirectURIBySessionID, sessionID)
var redirect_uri string
err := row.Scan(&redirect_uri)
return redirect_uri, err
}
const insertCredential = `-- name: InsertCredential :one
INSERT INTO credentials (
handle,
credential_id,
origin,
type,
transports
) VALUES (?, ?, ?, ?, ?)
RETURNING id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports
`
type InsertCredentialParams struct {
Handle string `json:"handle"`
CredentialID string `json:"credential_id"`
Origin string `json:"origin"`
Type string `json:"type"`
Transports string `json:"transports"`
}
func (q *Queries) InsertCredential(ctx context.Context, arg InsertCredentialParams) (Credential, error) {
row := q.db.QueryRowContext(ctx, insertCredential,
arg.Handle,
arg.CredentialID,
arg.Origin,
arg.Type,
arg.Transports,
)
var i Credential
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Handle,
&i.CredentialID,
&i.AuthenticatorAttachment,
&i.Origin,
&i.Type,
&i.Transports,
)
return i, err
}
const insertProfile = `-- name: InsertProfile :one
INSERT INTO profiles (
address,
handle,
origin,
name
) VALUES (?, ?, ?, ?)
RETURNING id, created_at, updated_at, deleted_at, address, handle, origin, name
`
type InsertProfileParams struct {
Address string `json:"address"`
Handle string `json:"handle"`
Origin string `json:"origin"`
Name string `json:"name"`
}
func (q *Queries) InsertProfile(ctx context.Context, arg InsertProfileParams) (Profile, error) {
row := q.db.QueryRowContext(ctx, insertProfile,
arg.Address,
arg.Handle,
arg.Origin,
arg.Name,
)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
)
return i, err
}
const softDeleteCredential = `-- name: SoftDeleteCredential :exec
UPDATE credentials
SET deleted_at = CURRENT_TIMESTAMP
WHERE credential_id = ?
`
func (q *Queries) SoftDeleteCredential(ctx context.Context, credentialID string) error {
_, err := q.db.ExecContext(ctx, softDeleteCredential, credentialID)
return err
}
const softDeleteProfile = `-- name: SoftDeleteProfile :exec
UPDATE profiles
SET deleted_at = CURRENT_TIMESTAMP
WHERE address = ?
`
func (q *Queries) SoftDeleteProfile(ctx context.Context, address string) error {
_, err := q.db.ExecContext(ctx, softDeleteProfile, address)
return err
}
const updateProfile = `-- name: UpdateProfile :one
UPDATE profiles
SET
name = ?,
handle = ?,
updated_at = CURRENT_TIMESTAMP
WHERE address = ?
AND deleted_at IS NULL
RETURNING id, created_at, updated_at, deleted_at, address, handle, origin, name
`
type UpdateProfileParams struct {
Name string `json:"name"`
Handle string `json:"handle"`
Address string `json:"address"`
}
func (q *Queries) UpdateProfile(ctx context.Context, arg UpdateProfileParams) (Profile, error) {
row := q.db.QueryRowContext(ctx, updateProfile, arg.Name, arg.Handle, arg.Address)
var i Profile
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.Address,
&i.Handle,
&i.Origin,
&i.Name,
)
return i, err
}
const updateSessionHumanVerification = `-- name: UpdateSessionHumanVerification :one
UPDATE sessions
SET
is_human_first = ?,
is_human_last = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
RETURNING id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id
`
type UpdateSessionHumanVerificationParams struct {
IsHumanFirst bool `json:"is_human_first"`
IsHumanLast bool `json:"is_human_last"`
ID string `json:"id"`
}
func (q *Queries) UpdateSessionHumanVerification(ctx context.Context, arg UpdateSessionHumanVerificationParams) (Session, error) {
row := q.db.QueryRowContext(ctx, updateSessionHumanVerification, arg.IsHumanFirst, arg.IsHumanLast, arg.ID)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return i, err
}
const updateSessionWithProfileID = `-- name: UpdateSessionWithProfileID :one
UPDATE sessions
SET
profile_id = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
RETURNING id, created_at, updated_at, deleted_at, browser_name, browser_version, client_ipaddr, platform, is_desktop, is_mobile, is_tablet, is_tv, is_bot, challenge, is_human_first, is_human_last, profile_id
`
type UpdateSessionWithProfileIDParams struct {
ProfileID int64 `json:"profile_id"`
ID string `json:"id"`
}
func (q *Queries) UpdateSessionWithProfileID(ctx context.Context, arg UpdateSessionWithProfileIDParams) (Session, error) {
row := q.db.QueryRowContext(ctx, updateSessionWithProfileID, arg.ProfileID, arg.ID)
var i Session
err := row.Scan(
&i.ID,
&i.CreatedAt,
&i.UpdatedAt,
&i.DeletedAt,
&i.BrowserName,
&i.BrowserVersion,
&i.ClientIpaddr,
&i.Platform,
&i.IsDesktop,
&i.IsMobile,
&i.IsTablet,
&i.IsTv,
&i.IsBot,
&i.Challenge,
&i.IsHumanFirst,
&i.IsHumanLast,
&i.ProfileID,
)
return i, err
}
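
Each generated method wraps a single QueryRowContext/Scan round trip, so higher-level flows are composed in application code. A hedged sketch of chaining the session helpers after a challenge is answered; the flow, the package name, and the import path are illustrative, not the project's actual handler logic:

package session // hypothetical package, for illustration only

import (
	"context"

	motrorm "github.com/onsonr/motr/internal/models/motrorm" // import path is hypothetical
)

// markHumanVerified looks up the stored challenge and then flips the
// human-verification flags on the session. Errors from the generated
// methods are returned unchanged, including sql.ErrNoRows when the
// session is unknown or soft-deleted.
func markHumanVerified(ctx context.Context, q *motrorm.Queries, sessionID string) error {
	if _, err := q.GetChallengeBySessionID(ctx, sessionID); err != nil {
		return err
	}
	_, err := q.UpdateSessionHumanVerification(ctx, motrorm.UpdateSessionHumanVerificationParams{
		IsHumanFirst: true,
		IsHumanLast:  true,
		ID:           sessionID,
	})
	return err
}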

View File

@ -1,151 +0,0 @@
-- name: InsertCredential :one
INSERT INTO credentials (
id,
created_at,
updated_at,
deleted_at,
handle,
credential_id,
authenticator_attachment,
origin,
type,
transports
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
RETURNING *;
-- name: InsertProfile :one
INSERT INTO profiles (
id,
created_at,
updated_at,
deleted_at,
address,
handle,
origin,
name,
status
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
RETURNING *;
-- name: GetProfileByID :one
SELECT * FROM profiles
WHERE id = ? AND deleted_at IS NULL
LIMIT 1;
-- name: GetProfileByAddress :one
SELECT * FROM profiles
WHERE address = ? AND deleted_at IS NULL
LIMIT 1;
-- name: GetChallengeBySessionID :one
SELECT challenge FROM sessions
WHERE id = ? AND deleted_at IS NULL
LIMIT 1;
-- name: GetHumanVerificationNumbers :one
SELECT is_human_first, is_human_last FROM sessions
WHERE id = ? AND deleted_at IS NULL
LIMIT 1;
-- name: GetSessionByID :one
SELECT * FROM sessions
WHERE id = ? AND deleted_at IS NULL
LIMIT 1;
-- name: GetSessionByClientIP :one
SELECT * FROM sessions
WHERE client_ipaddr = ? AND deleted_at IS NULL
LIMIT 1;
-- name: UpdateSessionHumanVerification :one
UPDATE sessions
SET
is_human_first = ?,
is_human_last = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
RETURNING *;
-- name: UpdateSessionWithProfileID :one
UPDATE sessions
SET
profile_id = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
RETURNING *;
-- name: CheckHandleExists :one
SELECT COUNT(*) > 0 as handle_exists FROM profiles
WHERE handle = ?
AND deleted_at IS NULL;
-- name: GetCredentialsByHandle :many
SELECT * FROM credentials
WHERE handle = ?
AND deleted_at IS NULL;
-- name: GetCredentialByID :one
SELECT * FROM credentials
WHERE credential_id = ?
AND deleted_at IS NULL
LIMIT 1;
-- name: SoftDeleteCredential :exec
UPDATE credentials
SET deleted_at = CURRENT_TIMESTAMP
WHERE credential_id = ?;
-- name: SoftDeleteProfile :exec
UPDATE profiles
SET deleted_at = CURRENT_TIMESTAMP
WHERE id = ?;
-- name: UpdateProfile :one
UPDATE profiles
SET
name = ?,
handle = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?
AND deleted_at IS NULL
RETURNING *;
-- name: GetProfileByHandle :one
SELECT * FROM profiles
WHERE handle = ?
AND deleted_at IS NULL
LIMIT 1;
-- name: GetVaultConfigByCID :one
SELECT * FROM vaults
WHERE cid = ?
AND deleted_at IS NULL
LIMIT 1;
-- name: GetVaultRedirectURIBySessionID :one
SELECT redirect_uri FROM vaults
WHERE session_id = ?
AND deleted_at IS NULL
LIMIT 1;
-- name: CreateSession :one
INSERT INTO sessions (
id,
created_at,
updated_at,
deleted_at,
browser_name,
browser_version,
client_ipaddr,
platform,
is_desktop,
is_mobile,
is_tablet,
is_tv,
is_bot,
challenge,
is_human_first,
is_human_last,
profile_id
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
RETURNING *;

View File

@ -1,122 +0,0 @@
-- Profiles represent user identities
CREATE TABLE profiles (
id TEXT PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at DATETIME,
address TEXT NOT NULL,
handle TEXT NOT NULL UNIQUE,
origin TEXT NOT NULL,
name TEXT NOT NULL,
status TEXT NOT NULL,
UNIQUE(address, origin)
);
-- Accounts represent blockchain accounts
CREATE TABLE accounts (
id TEXT PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at DATETIME,
number INTEGER NOT NULL,
sequence INTEGER NOT NULL DEFAULT 0,
address TEXT NOT NULL UNIQUE,
public_key TEXT NOT NULL,
chain_id TEXT NOT NULL,
controller TEXT NOT NULL,
is_subsidiary INTEGER NOT NULL DEFAULT 0,
is_validator INTEGER NOT NULL DEFAULT 0,
is_delegator INTEGER NOT NULL DEFAULT 0,
is_accountable INTEGER NOT NULL DEFAULT 1
);
-- Assets represent tokens and coins
CREATE TABLE assets (
id TEXT PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at DATETIME,
name TEXT NOT NULL,
symbol TEXT NOT NULL,
decimals INTEGER NOT NULL CHECK(decimals >= 0),
chain_id TEXT NOT NULL,
channel TEXT NOT NULL,
asset_type TEXT NOT NULL,
coingecko_id TEXT,
UNIQUE(chain_id, symbol)
);
-- Credentials store WebAuthn credentials
CREATE TABLE credentials (
id TEXT PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at DATETIME,
handle TEXT NOT NULL,
credential_id TEXT NOT NULL UNIQUE,
authenticator_attachment TEXT NOT NULL,
origin TEXT NOT NULL,
type TEXT NOT NULL,
transports TEXT NOT NULL
);
-- Sessions track user authentication state
CREATE TABLE sessions (
id TEXT PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at DATETIME,
browser_name TEXT NOT NULL,
browser_version TEXT NOT NULL,
client_ipaddr TEXT NOT NULL,
platform TEXT NOT NULL,
is_desktop INTEGER NOT NULL DEFAULT 0,
is_mobile INTEGER NOT NULL DEFAULT 0,
is_tablet INTEGER NOT NULL DEFAULT 0,
is_tv INTEGER NOT NULL DEFAULT 0,
is_bot INTEGER NOT NULL DEFAULT 0,
challenge TEXT NOT NULL,
is_human_first INTEGER NOT NULL DEFAULT 0,
is_human_last INTEGER NOT NULL DEFAULT 0,
profile_id TEXT NOT NULL,
FOREIGN KEY (profile_id) REFERENCES profiles(id)
);
-- Vaults store encrypted data
CREATE TABLE vaults (
id TEXT PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
deleted_at DATETIME,
handle TEXT NOT NULL,
origin TEXT NOT NULL,
address TEXT NOT NULL,
cid TEXT NOT NULL UNIQUE,
config TEXT NOT NULL,
session_id TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
FOREIGN KEY (session_id) REFERENCES sessions(id)
);
-- Indexes for common queries
CREATE INDEX idx_profiles_handle ON profiles(handle);
CREATE INDEX idx_profiles_address ON profiles(address);
CREATE INDEX idx_profiles_origin ON profiles(origin);
CREATE INDEX idx_profiles_status ON profiles(status);
CREATE INDEX idx_profiles_deleted_at ON profiles(deleted_at);
CREATE INDEX idx_accounts_address ON accounts(address);
CREATE INDEX idx_accounts_chain_id ON accounts(chain_id);
CREATE INDEX idx_accounts_deleted_at ON accounts(deleted_at);
CREATE INDEX idx_assets_symbol ON assets(symbol);
CREATE INDEX idx_assets_chain_id ON assets(chain_id);
CREATE INDEX idx_assets_deleted_at ON assets(deleted_at);
CREATE INDEX idx_credentials_handle ON credentials(handle);
CREATE INDEX idx_credentials_origin ON credentials(origin);
CREATE INDEX idx_credentials_deleted_at ON credentials(deleted_at);
CREATE INDEX idx_sessions_profile_id ON sessions(profile_id);
CREATE INDEX idx_sessions_client_ipaddr ON sessions(client_ipaddr);
CREATE INDEX idx_sessions_deleted_at ON sessions(deleted_at);
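
The sessions and vaults tables declare FOREIGN KEY constraints, which sqlite only enforces when the foreign_keys pragma is enabled on each connection. A hedged sketch of opening the database with enforcement switched on; the modernc.org/sqlite driver and its DSN pragma syntax are assumptions, and other drivers need an explicit PRAGMA statement instead:

package main

import (
	"database/sql"
	"log"

	_ "modernc.org/sqlite" // driver choice is an assumption, not part of this commit
)

func main() {
	// The _pragma DSN parameter is specific to modernc.org/sqlite and applies
	// to every connection the pool opens.
	db, err := sql.Open("sqlite", "file:vault.db?_pragma=foreign_keys(1)")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
	log.Println("foreign key enforcement enabled")
}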

View File

@ -1,26 +0,0 @@
version: "2"
sql:
- engine: "sqlite"
queries: "./sink/vault/query.sql"
schema: "./sink/vault/schema.sql"
gen:
go:
emit_interface: true
emit_json_tags: true
package: "motrorm"
out: "motrorm"
- engine: "postgresql"
queries: "./sink/highway/query.sql"
schema: "./sink/highway/schema.sql"
gen:
go:
emit_all_enum_values: true
emit_enum_valid_method: true
emit_json_tags: true
emit_interface: true
emit_result_struct_pointers: true
omit_unused_structs: true
package: "hwayorm"
out: "hwayorm"
sql_package: "pgx/v5"

View File

@ -4,5 +4,5 @@ import (
_ "embed"
)
//go:embed vault/schema.sql
//go:embed schema.sql
var SchemaVaultSQL string
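
With the directive now pointing at schema.sql next to the embed file, the DDL above ships inside the binary and can be applied when a fresh database is opened. A minimal sketch, assuming SchemaVaultSQL holds the full schema shown earlier; the import path is hypothetical, and a real deployment might prefer an idempotent migration layer since the CREATE TABLE statements fail if the tables already exist:

package database // hypothetical package, for illustration only

import (
	"context"
	"database/sql"

	embedded "github.com/onsonr/motr/internal/embed" // import path is hypothetical
)

// InitSchema applies the embedded DDL to an empty database. Many sqlite
// drivers accept a multi-statement script in a single Exec call, but this
// behaviour is driver-dependent.
func InitSchema(ctx context.Context, db *sql.DB) error {
	_, err := db.ExecContext(ctx, embedded.SchemaVaultSQL)
	return err
}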

internal/sqlc.yaml Normal file
View File

@ -0,0 +1,11 @@
version: "2"
sql:
- engine: "sqlite"
queries: "./sink/query.sql"
schema: "./sink/schema.sql"
gen:
go:
emit_interface: true
emit_json_tags: true
package: "models"
out: "models"
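
The simplified config drops the postgresql target and keeps a single sqlite codegen pass whose output lands in internal/models. Regeneration is typically wired up with a go:generate directive placed next to this sqlc.yaml, since sqlc looks for its config in the working directory; the sketch below is an assumption about that wiring, not something taken from this commit:

package internal // hypothetical doc-only file living beside internal/sqlc.yaml

// Regenerate the models package whenever sink/query.sql or sink/schema.sql
// changes. The invocation is an assumption about how the project runs sqlc;
// a pinned version could be used instead of @latest.
//go:generate go run github.com/sqlc-dev/sqlc/cmd/sqlc@latest generate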

pkl/App.pkl Normal file
View File

@ -0,0 +1,81 @@
@go.Package { name = "github.com/onsonr/motr/internal/config" }
module sonr.net.Motr
import "package://pkg.pkl-lang.org/pkl-go/pkl.golang@0.5.0#/go.pkl"
typealias JSON = String
class JsonField extends go.Field {
structTags {
["json"] = "%{name},omitempty"
}
}
class Config {
@JsonField
ipfsGatewayUrl: String
@JsonField
motrToken: String
@JsonField
motrAddress: String
@JsonField
sonrApiUrl: String
@JsonField
sonrRpcUrl: String
@JsonField
sonrChainId: String
@JsonField
vaultSchema: Schema
}
class Environment {
@JsonField
isDevelopment: Boolean
@JsonField
cacheVersion: String
@JsonField
httpserverPath: String
@JsonField
wasmExecPath: String
@JsonField
wasmPath: String
}
class Schema {
version: Int
@JsonField
account: String
@JsonField
asset: String
@JsonField
chain: String
@JsonField
credential: String
@JsonField
jwk: String
@JsonField
grant: String
@JsonField
keyshare: String
@JsonField
profile: String
}
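
The @go.Package annotation points pkl-gen-go at github.com/onsonr/motr/internal/config, so these Pkl classes become Go structs carrying the json tags declared by JsonField. A hedged sketch of evaluating a Pkl module at runtime with the pkl-go library; the evaluator calls follow pkl-go's documented API, but the hand-written struct, its pkl tags, and the file path are illustrative stand-ins for the generated bindings:

package main

import (
	"context"
	"log"

	"github.com/apple/pkl-go/pkl"
)

// Config mirrors a subset of the Pkl Config class above; in practice the
// bindings generated into internal/config would be used instead.
type Config struct {
	IpfsGatewayUrl string `pkl:"ipfsGatewayUrl"`
	MotrAddress    string `pkl:"motrAddress"`
	SonrChainId    string `pkl:"sonrChainId"`
}

func main() {
	ctx := context.Background()
	evaluator, err := pkl.NewEvaluator(ctx, pkl.PreconfiguredOptions)
	if err != nil {
		log.Fatal(err)
	}
	defer evaluator.Close()

	var cfg Config
	// The path is a placeholder; App.pkl declares classes rather than concrete
	// values, so a concrete amending module would be evaluated in practice.
	if err := evaluator.EvaluateModule(ctx, pkl.FileSource("pkl/App.pkl"), &cfg); err != nil {
		log.Fatal(err)
	}
	log.Printf("chain id: %s", cfg.SonrChainId)
}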

View File

@ -1,26 +0,0 @@
version: "2"
sql:
- engine: "sqlite"
queries: "./sink/vault/query.sql"
schema: "./sink/vault/schema.sql"
gen:
go:
emit_interface: true
emit_json_tags: true
package: "motrorm"
out: "motrorm"
- engine: "postgresql"
queries: "./sink/highway/query.sql"
schema: "./sink/highway/schema.sql"
gen:
go:
emit_all_enum_values: true
emit_enum_valid_method: true
emit_json_tags: true
emit_interface: true
emit_result_struct_pointers: true
omit_unused_structs: true
package: "hwayorm"
out: "hwayorm"
sql_package: "pgx/v5"

View File

@ -4,7 +4,7 @@ plugins:
- plugin: es
opt: target=ts
out: .
- plugin: @onsonr/es
path: ./scripts/protoc-gen-@onsonr/es.mjs
- plugin: cosmes
path: ./scripts/protoc-gen-cosmes.mjs
opt: target=ts
out: .