mirror of
https://github.com/cf-sonr/motr.git
synced 2026-01-12 02:59:13 +00:00
Refactor/orm (#7)
* feat/refactor-motr * feat/refactor-motr * feat: introduce Cloudflare Workers gateway application * chore: migrate build system from Makefile to Taskfile for improved automation * feat: streamline task management with Taskfile consolidation * feat: consolidate build and install tasks for simplified workflows * <no value> * feat: enable publishing for core packages * feat/refactor-motr * feat: integrate updated crypto library for enhanced security * ci: Configure CI/CD to build and test WASM signer * refactor: streamline build process using npm scripts * fix: Correct template file paths for accurate error reporting * refactor: standardize templ file paths for improved maintainability * chore: remove package documentation * build: exclude documentation artifacts from version control * feat: serve static assets from cloudflare worker * feat: introduce Motr controller service * refactor: move UI components to separate ui module * refactor: move resolver middleware to top-level middleware directory * feat: introduce modular middleware architecture * refactor: improve separation of concerns by relocating endpoint definitions * build: simplify codebase by deleting unused info types * refactor: decouple middleware configurations * feat: integrate request middleware for enhanced processing * feat: implement register and login pages * feat: integrate WASM signer for enhanced security * refactor: move type definitions to models package * refactor: rename ipfs middleware to vault middleware * feat: Add Dockerfile for controller service * feat: Add Dockerfile for controller with multi-stage build * chore: Update Golang version to 1.24.2 in Dockerfile * feat: introduce Docker support for application deployment * feat: Implement WASM-based signing service * feat: migrate build system to Taskfile for improved automation * feat: enable docker-based development for controller and resolver * feat: Add docker-compose services for resolver and controller workers * refactor: streamline build and deployment processes with Devbox * chore: standardize development environment with Devbox * chore: standardize container entrypoint for improved consistency * feat: introduce docker-compose setup for local development * feat: remove initial placeholder code * refactor: restructure project modules for improved organization * feat: integrate motr UI library for enhanced components * chore: upgrade motr dependency to v0.0.3 * refactor: restructure project layout for improved modularity * refactor: consolidate data models and options into directory * feat: integrate sqlc for database interaction * feat: integrate D1 database for improved data management * refactor: improve naming consistency across project * feat: enhance context with HTTP header utilities * refactor: restructure project layout for improved maintainability * refactor: centralize rendering logic into middleware * chore: update motr dependency to v0.0.5 to address data consistency issues * feat: consolidate handler logic into root * refactor: relocate handlers and middleware to internal packages * chore: update dependency to v0.9.0 * refactor: Improve code structure and add comprehensive documentation for WebAssembly signer module * feat: implement WASM-based signing functionality * chore: remove build system configuration * feat: integrate D1 database for persistent data storage * feat: enable D1 database integration for Cloudflare Workers * feat: enhance task execution with docker-compose integration * refactor: centralize database queries and models * 
refactor: improve sqlc code generation and project structure * docs: Update README with Docker support and new project architecture * refactor: centralize Sonr configuration via middleware * chore: improve build task definitions in Taskfile * chore: remove docker deployment configuration * feat: upgrade crypto module to v0.11.0 * refactor: migrate to a configuration-driven architecture * refactor: inject database common queries into index handler * feat: streamline worker initialization * refactor: standardize package versioning process * build: prepare vault package for public release * feat: enable WASM-based vault signing with durable objects * feat: introduce Vault service for enhanced security * feat: upgrade crypto library to v0.12.1 for enhanced security features * build: update middleware build constraints * feat: introduce struct-based handlers for improved organization * feat: centralize database configuration * build: update database seeding to target remote instance * feat: enhance asset handling with shared coin type * feat: decouple build process from database initialization * refactor: move base UI components to a dedicated directory * refactor: improve component structure for enhanced maintainability * refactor: rename binding for clarity and consistency * feat: introduce development task and environment configurations * feat: introduce mprocs for simplified local development * <no value> * refactor: rename Vault DB to Controller DB for clarity * refactor: simplify configuration loading and database connections * feat: introduce MotrMode configuration to differentiate service roles * refactor: restructure base components and consolidate metadata handling * feat: introduce session context middleware for request handling * feat: enhance task management with database migration support * feat: Add database controller middleware and context retrieval methods * refactor: Improve session context handling and add full database controller middleware * feat: integrate Helia and Extism for Vault durable object * refactor: rename resolver to frontend * feat: streamline infrastructure and data management * refactor: improve task management and dependency loading * fix: remove legacy fetch handler from vault worker * feat: integrate WebAuthn for secure authentication * build: add hx-trigger for enhanced interaction * refactor: centralize session ID handling in middleware * refactor: simplify session context initialization * feat/split workers (#6) * refactor: rename claim route to register for clarity * refactor: rename demo view to dashboard view * feat: automate build and publish process * refactor: unify database queries and models for vault operations * feat: implement WASM compatible server and routing * feat: introduce controller-based routing * feat: implement passkey-based authentication * feat: implement Cloudflare cache middleware for improved performance * refactor: move ui components to ui package * feat: add handler functions for login and registration * refactor: centralize route registration logic * feat: implement user registration and login with passkey support * refactor: restructure view components for improved organization * feat: enable handle-based authentication and session management * refactor: improve config access and remove unused dependencies * refactor: restructure middleware and configuration for clarity * refactor: Remove external libraries * docs: update architecture to reflect removal of WASM build * refactor: centralize metadata and coin info types * 
refactor: move auth types to auth package * refactor: remove session management from vault service * refactor: reorganize UI components and middleware for clarity * refactor: move UI components to internal directory * feat: add session expiry configuration * refactor: streamline Taskfile and remove deprecated options * feat: introduce account entities and UI components * refactor: streamline asset representation with entity interfaces * feat: introduce asset pricing and market data * feat: enhance radar with session and cache middleware * fix: update dependencies and resolve peer dependency conflicts * feat: add blockchain entity and data model * feat: introduce entity-specific query interfaces * feat: Implement transaction creation, signing, and broadcasting methods * feat: implement durable object vault with RPC and IPFS support * refactor: align protobuf imports with updated cosm-orc package * feat: simplify auth components by removing options * feat: enhance data models for comprehensive crypto analysis * feat: implement account lookup by handle * refactor: simplify account card rendering * feat: enhance UI rendering with middleware * feat: remove helia file operations * build: scaffold navigation component with templ * feat: introduce UI layout components and dependency management * feat: remove unused assets for templated html * feat: implement authentication flow with WebAuthn and handle registration * feat: remove radar cmd * feat: enhance application architecture with module-based structure * refactor: relocate config and middleware to internal packages * feat: integrate session management into landing page * feat: improve landing page handler setup and session handling * feat: streamline initial setup and configuration process * chore: reorganize project layout for clarity and maintainability * chore: reorganize project structure by moving workers to 'cmd' * refactor/orm * feat: enhance task automation with root directory task * refactor: relocate chart components to dedicated directory
5
cmd/front/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
build
node_modules
.wrangler

.dev.vars*
27
cmd/front/main.go
Normal file
@@ -0,0 +1,27 @@
//go:build js && wasm
// +build js,wasm

package main

import (
    "github.com/sonr-io/motr/handlers/auth"
    "github.com/sonr-io/motr/handlers/landing"
    "github.com/sonr-io/motr/internal/config"
    "github.com/sonr-io/motr/internal/middleware"
)

func main() {
    // Setup config
    e, c := config.New()
    e.Use(middleware.UseSession(c), middleware.UseCloudflareCache(c))

    // Register controllers
    if err := landing.Register(c, e); err != nil {
        panic(err)
    }
    if err := auth.Register(c, e); err != nil {
        panic(err)
    }
    // Start server
    e.Serve()
}
1540
cmd/front/package-lock.json
generated
Normal file
File diff suppressed because it is too large
16
cmd/front/package.json
Normal file
@@ -0,0 +1,16 @@
{
  "name": "@sonr-io/motr-front",
  "version": "0.0.1",
  "private": true,
  "scripts": {
    "build": "npm run clean && npm run assets && npm run wasm",
    "assets": "go run github.com/syumai/workers/cmd/workers-assets-gen -mode=go",
    "wasm": "GOOS=js GOARCH=wasm go build -o ./build/app.wasm .",
    "deploy": "wrangler deploy",
    "start": "wrangler dev",
    "clean": "rm -rf ./build && rm -rf .wrangler && rm -rf ./dist && rm -rf ./node_modules && npm install"
  },
  "devDependencies": {
    "wrangler": "^4.10.0"
  }
}
47
cmd/front/wrangler.toml
Normal file
@@ -0,0 +1,47 @@
# Top-level configuration
name = "motr-front"
main = "build/worker.mjs"
compatibility_date = "2025-04-14"

routes = [
  { pattern = "sonr.id", custom_domain = true },
]

[build]
command = "npm run build"

[dev]
port = 8787

[vars]
SONR_CHAIN_ID = 'sonr-testnet-1'
IPFS_GATEWAY = 'https://ipfs.sonr.land'
SONR_API_URL = 'https://api.sonr.land'
SONR_RPC_URL = 'https://rpc.sonr.land'
SONR_GRPC_URL = 'https://grpc.sonr.id'
MATRIX_SERVER = 'https://bm.chat'
MOTR_GATEWAY = 'https://sonr.id'
MOTR_VAULT = 'https://did.run'
MOTR_MODE = 'resolver'

[observability]
enabled = true
logpush = true

[[r2_buckets]]
binding = 'PROFILES'
bucket_name = 'profiles'

[[d1_databases]]
binding = "DB" # available in your Worker on env.DB
database_name = "motr-db"
database_id = "abc70ab3-32ce-4600-9b15-a452f92b7987"

[[kv_namespaces]]
binding = "SESSIONS" # available in your Worker on env.SESSIONS
id = "ea5de66fcfc14b5eba170395e29432ee"

[[kv_namespaces]]
binding = "HANDLES" # available in your Worker on env.HANDLES
id = "271d47087a8842b2aac5ee79cf7bb203"
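Note: the D1 and KV bindings declared above surface on the worker's env object at runtime. The sketch below is illustrative only (it is not part of this commit, and the handle and keys are made up); it shows how a module worker could read the DB, SESSIONS, and HANDLES bindings.

// Minimal sketch, assuming the bindings from the wrangler.toml above.
export default {
  async fetch(request, env) {
    // D1 binding "DB" from [[d1_databases]]
    const profile = await env.DB.prepare(
      "SELECT * FROM profiles WHERE handle = ?1 AND deleted_at IS NULL",
    ).bind("alice").first();

    // KV bindings "SESSIONS" and "HANDLES" from [[kv_namespaces]]
    const session = await env.SESSIONS.get("session:abc123", "json");
    await env.HANDLES.put("alice", JSON.stringify({ address: profile?.address }));

    return Response.json({ profile, session });
  },
};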
5
cmd/vault/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
build
node_modules
.wrangler

.dev.vars*
24
cmd/vault/main.go
Normal file
@@ -0,0 +1,24 @@
//go:build js && wasm
// +build js,wasm

package main

import (
    "github.com/sonr-io/motr/handlers/auth"
    "github.com/sonr-io/motr/internal/config"
    "github.com/sonr-io/motr/internal/middleware"
)

func main() {
    // Setup config
    e, c := config.New()
    e.Use(middleware.UseSession(c), middleware.UseCloudflareCache(c))

    // Register controllers
    if err := auth.Register(c, e); err != nil {
        panic(err)
    }

    // Start server
    e.Serve()
}
6433
cmd/vault/package-lock.json
generated
Normal file
File diff suppressed because it is too large
27
cmd/vault/package.json
Normal file
@@ -0,0 +1,27 @@
{
  "name": "@sonr-io/motr-vault",
  "version": "0.0.1",
  "private": true,
  "scripts": {
    "build": "npm run clean && npm run assets && npm run wasm",
    "assets": "go run github.com/syumai/workers/cmd/workers-assets-gen -mode=go",
    "wasm": "GOOS=js GOARCH=wasm go build -o ./build/app.wasm .",
    "migrate": "npx wrangler d1 execute motr-db --remote --file=./schema.sql",
    "deploy": "npx wrangler deploy",
    "start": "npx wrangler dev",
    "clean": "rm -rf ./build && rm -rf .wrangler && rm -rf ./dist && rm -rf ./node_modules && npm install"
  },
  "dependencies": {
    "@extism/extism": "^2.0.0-rc11",
    "@helia/dag-cbor": "^1.0.1",
    "@helia/dag-json": "^1.0.1",
    "@helia/json": "^1.0.1",
    "@helia/strings": "^1.0.1",
    "@helia/unixfs": "^1.4.1",
    "helia": "^2.1.0",
    "sonr-cosmes": "^0.0.5"
  },
  "devDependencies": {
    "wrangler": "^4.10.0"
  }
}
1026
cmd/vault/query.sql
Normal file
File diff suppressed because it is too large
356
cmd/vault/schema.sql
Normal file
@@ -0,0 +1,356 @@
-- Assets represent tokens and coins
CREATE TABLE assets (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    name TEXT NOT NULL,
    symbol TEXT NOT NULL,
    decimals INTEGER NOT NULL CHECK(decimals >= 0),
    chain_id TEXT NOT NULL,
    channel TEXT NOT NULL,
    asset_type TEXT NOT NULL,
    coingecko_id TEXT,
    UNIQUE(chain_id, symbol)
);

-- Credentials store WebAuthn credentials
CREATE TABLE credentials (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    handle TEXT NOT NULL,
    credential_id TEXT NOT NULL UNIQUE,
    authenticator_attachment TEXT NOT NULL,
    origin TEXT NOT NULL,
    type TEXT NOT NULL,
    transports TEXT NOT NULL
);

-- Accounts represent blockchain accounts
CREATE TABLE accounts (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    number INTEGER NOT NULL,
    sequence INTEGER NOT NULL DEFAULT 0,
    address TEXT NOT NULL UNIQUE,
    public_key TEXT NOT NULL CHECK(json_valid(public_key)),
    chain_id TEXT NOT NULL,
    block_created INTEGER NOT NULL,
    controller TEXT NOT NULL,
    label TEXT NOT NULL,
    handle TEXT NOT NULL,
    is_subsidiary BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_subsidiary IN (0,1)),
    is_validator BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_validator IN (0,1)),
    is_delegator BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_delegator IN (0,1)),
    is_accountable BOOLEAN NOT NULL DEFAULT TRUE CHECK(is_accountable IN (0,1))
);

-- Profiles represent user identities
CREATE TABLE profiles (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    address TEXT NOT NULL,
    handle TEXT NOT NULL UNIQUE,
    origin TEXT NOT NULL,
    name TEXT NOT NULL,
    UNIQUE(address, origin)
);

-- Vaults store encrypted data
CREATE TABLE vaults (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    handle TEXT NOT NULL,
    origin TEXT NOT NULL,
    address TEXT NOT NULL,
    cid TEXT NOT NULL UNIQUE,
    config TEXT NOT NULL CHECK(json_valid(config)),
    session_id TEXT NOT NULL,
    redirect_uri TEXT NOT NULL
);

-- Prices entity based on the Alternative.me API for crypto prices
CREATE TABLE prices (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    asset_id TEXT NOT NULL,
    price_usd REAL,
    price_btc REAL,
    volume_24h_usd REAL,
    market_cap_usd REAL,
    available_supply REAL,
    total_supply REAL,
    max_supply REAL,
    percent_change_1h REAL,
    percent_change_24h REAL,
    percent_change_7d REAL,
    rank INTEGER,
    last_updated TIMESTAMP NOT NULL,
    FOREIGN KEY (asset_id) REFERENCES assets(id)
);

-- Currency conversion rates for crypto prices
CREATE TABLE price_conversions (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    price_id TEXT NOT NULL,
    currency_code TEXT NOT NULL,
    price REAL,
    volume_24h REAL,
    market_cap REAL,
    last_updated TIMESTAMP NOT NULL,
    FOREIGN KEY (price_id) REFERENCES prices(id),
    UNIQUE(price_id, currency_code)
);

-- Global market data from Alternative.me API
CREATE TABLE global_market (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    total_market_cap_usd REAL,
    total_24h_volume_usd REAL,
    bitcoin_percentage_of_market_cap REAL,
    active_currencies INTEGER,
    active_assets INTEGER,
    active_markets INTEGER,
    last_updated TIMESTAMP NOT NULL
);

-- Fear and Greed Index data from Alternative.me
CREATE TABLE fear_greed_index (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    value INTEGER NOT NULL,
    value_classification TEXT NOT NULL,
    timestamp TIMESTAMP NOT NULL,
    time_until_update TEXT
);

-- Listings data from Alternative.me API
CREATE TABLE crypto_listings (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    api_id TEXT NOT NULL,
    name TEXT NOT NULL,
    symbol TEXT NOT NULL,
    website_slug TEXT NOT NULL,
    UNIQUE(api_id)
);

-- Service for Service Records sourced on chain
CREATE TABLE services (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    name TEXT NOT NULL,
    description TEXT,
    chain_id TEXT NOT NULL,
    address TEXT NOT NULL,
    owner_address TEXT NOT NULL,
    metadata TEXT CHECK(json_valid(metadata)),
    status TEXT NOT NULL,
    block_height INTEGER NOT NULL,
    FOREIGN KEY (chain_id) REFERENCES assets(chain_id),
    UNIQUE(chain_id, address)
);

-- Activity table for basic transaction broadcast activity
CREATE TABLE activities (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    account_id TEXT NOT NULL,
    tx_hash TEXT,
    tx_type TEXT NOT NULL,
    status TEXT NOT NULL,
    amount TEXT,
    fee TEXT,
    gas_used INTEGER,
    gas_wanted INTEGER,
    memo TEXT,
    block_height INTEGER,
    timestamp TIMESTAMP NOT NULL,
    raw_log TEXT,
    error TEXT,
    FOREIGN KEY (account_id) REFERENCES accounts(id)
);

-- Health table for scheduled checks for API endpoints
CREATE TABLE health (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,
    endpoint_url TEXT NOT NULL,
    endpoint_type TEXT NOT NULL,
    chain_id TEXT,
    status TEXT NOT NULL,
    response_time_ms INTEGER,
    last_checked TIMESTAMP NOT NULL,
    next_check TIMESTAMP,
    failure_count INTEGER NOT NULL DEFAULT 0,
    success_count INTEGER NOT NULL DEFAULT 0,
    response_data TEXT,
    error_message TEXT,
    FOREIGN KEY (chain_id) REFERENCES assets(chain_id)
);

-- Blockchains table to store chain configuration parameters
CREATE TABLE blockchains (
    id TEXT PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    deleted_at TIMESTAMP,

    -- Basic chain information
    chain_name TEXT NOT NULL,
    chain_id_cosmos TEXT,
    chain_id_evm TEXT,
    api_name TEXT,
    bech_account_prefix TEXT,
    bech_validator_prefix TEXT,

    -- Chain assets
    main_asset_symbol TEXT,
    main_asset_denom TEXT,
    staking_asset_symbol TEXT,
    staking_asset_denom TEXT,
    is_stake_enabled BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_stake_enabled IN (0,1)),

    -- Chain images
    chain_image TEXT,
    main_asset_image TEXT,
    staking_asset_image TEXT,

    -- Chain types and features
    chain_type TEXT NOT NULL CHECK(json_valid(chain_type)),
    is_support_mobile_wallet BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_mobile_wallet IN (0,1)),
    is_support_extension_wallet BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_extension_wallet IN (0,1)),
    is_support_erc20 BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_erc20 IN (0,1)),

    -- Descriptions in multiple languages
    description_en TEXT,
    description_ko TEXT,
    description_ja TEXT,

    -- Genesis information
    origin_genesis_time TIMESTAMP,

    -- Account types configuration
    account_type TEXT NOT NULL CHECK(json_valid(account_type)),

    -- BTC staking specific
    btc_staking TEXT CHECK(json_valid(btc_staking)),

    -- Cosmos fee information
    cosmos_fee_info TEXT CHECK(json_valid(cosmos_fee_info)),

    -- EVM fee information
    evm_fee_info TEXT CHECK(json_valid(evm_fee_info)),

    -- Endpoints
    lcd_endpoint TEXT CHECK(json_valid(lcd_endpoint)),
    grpc_endpoint TEXT CHECK(json_valid(grpc_endpoint)),
    evm_rpc_endpoint TEXT CHECK(json_valid(evm_rpc_endpoint)),

    -- Explorer information
    explorer TEXT CHECK(json_valid(explorer)),

    -- Social and documentation links
    about TEXT CHECK(json_valid(about)),
    forum TEXT CHECK(json_valid(forum))
);

-- Add all necessary indexes
CREATE INDEX idx_assets_symbol ON assets(symbol);
CREATE INDEX idx_assets_chain_id ON assets(chain_id);
CREATE INDEX idx_assets_deleted_at ON assets(deleted_at);

CREATE INDEX idx_credentials_handle ON credentials(handle);
CREATE INDEX idx_credentials_origin ON credentials(origin);
CREATE INDEX idx_credentials_deleted_at ON credentials(deleted_at);

CREATE INDEX idx_accounts_address ON accounts(address);
CREATE INDEX idx_accounts_chain_id ON accounts(chain_id);
CREATE INDEX idx_accounts_block_created ON accounts(block_created);
CREATE INDEX idx_accounts_label ON accounts(label);
CREATE INDEX idx_accounts_controller ON accounts(controller);
CREATE INDEX idx_accounts_deleted_at ON accounts(deleted_at);

CREATE INDEX idx_profiles_handle ON profiles(handle);
CREATE INDEX idx_profiles_address ON profiles(address);
CREATE INDEX idx_profiles_deleted_at ON profiles(deleted_at);

CREATE INDEX idx_vaults_handle ON vaults(handle);
CREATE INDEX idx_vaults_session_id ON vaults(session_id);
CREATE INDEX idx_vaults_deleted_at ON vaults(deleted_at);

CREATE INDEX idx_prices_asset_id ON prices(asset_id);
CREATE INDEX idx_prices_rank ON prices(rank);
CREATE INDEX idx_prices_last_updated ON prices(last_updated);
CREATE INDEX idx_prices_deleted_at ON prices(deleted_at);

CREATE INDEX idx_price_conversions_price_id ON price_conversions(price_id);
CREATE INDEX idx_price_conversions_currency_code ON price_conversions(currency_code);
CREATE INDEX idx_price_conversions_deleted_at ON price_conversions(deleted_at);

CREATE INDEX idx_global_market_last_updated ON global_market(last_updated);
CREATE INDEX idx_global_market_deleted_at ON global_market(deleted_at);

CREATE INDEX idx_fear_greed_index_timestamp ON fear_greed_index(timestamp);
CREATE INDEX idx_fear_greed_index_value ON fear_greed_index(value);
CREATE INDEX idx_fear_greed_index_deleted_at ON fear_greed_index(deleted_at);

CREATE INDEX idx_crypto_listings_api_id ON crypto_listings(api_id);
CREATE INDEX idx_crypto_listings_symbol ON crypto_listings(symbol);
CREATE INDEX idx_crypto_listings_website_slug ON crypto_listings(website_slug);
CREATE INDEX idx_crypto_listings_deleted_at ON crypto_listings(deleted_at);

CREATE INDEX idx_services_name ON services(name);
CREATE INDEX idx_services_chain_id ON services(chain_id);
CREATE INDEX idx_services_address ON services(address);
CREATE INDEX idx_services_owner_address ON services(owner_address);
CREATE INDEX idx_services_status ON services(status);
CREATE INDEX idx_services_deleted_at ON services(deleted_at);

CREATE INDEX idx_activities_account_id ON activities(account_id);
CREATE INDEX idx_activities_tx_hash ON activities(tx_hash);
CREATE INDEX idx_activities_tx_type ON activities(tx_type);
CREATE INDEX idx_activities_status ON activities(status);
CREATE INDEX idx_activities_timestamp ON activities(timestamp);
CREATE INDEX idx_activities_block_height ON activities(block_height);
CREATE INDEX idx_activities_deleted_at ON activities(deleted_at);

CREATE INDEX idx_health_endpoint_url ON health(endpoint_url);
CREATE INDEX idx_health_endpoint_type ON health(endpoint_type);
CREATE INDEX idx_health_chain_id ON health(chain_id);
CREATE INDEX idx_health_status ON health(status);
CREATE INDEX idx_health_last_checked ON health(last_checked);
CREATE INDEX idx_health_next_check ON health(next_check);
CREATE INDEX idx_health_deleted_at ON health(deleted_at);

CREATE INDEX idx_blockchains_chain_name ON blockchains(chain_name);
CREATE INDEX idx_blockchains_chain_id_cosmos ON blockchains(chain_id_cosmos);
CREATE INDEX idx_blockchains_chain_id_evm ON blockchains(chain_id_evm);
CREATE INDEX idx_blockchains_main_asset_symbol ON blockchains(main_asset_symbol);
CREATE INDEX idx_blockchains_deleted_at ON blockchains(deleted_at);
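Note: as a hedged usage sketch for the schema above (not part of this commit; the helper name and env.DB binding are assumptions), a D1 prepared statement can join prices to assets while honoring the soft-delete columns:

// Illustrative helper; assumes a D1 binding named env.DB and the schema above.
async function latestPriceBySymbol(env, symbol) {
  return env.DB.prepare(
    `SELECT a.symbol, p.price_usd, p.percent_change_24h, p.last_updated
       FROM prices p
       JOIN assets a ON a.id = p.asset_id
      WHERE a.symbol = ?1
        AND p.deleted_at IS NULL
        AND a.deleted_at IS NULL
      ORDER BY p.last_updated DESC
      LIMIT 1`,
  ).bind(symbol).first();
}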
686
cmd/vault/worker.mjs
Normal file
@@ -0,0 +1,686 @@
export { default } from "./build/worker.mjs";
import { DurableObject } from "cloudflare:workers";
import createPlugin from "@extism/extism";
import { createHelia } from "helia";
import { strings } from "@helia/strings";
import { json } from "@helia/json";
import { dagJson } from "@helia/dag-json";
import { dagCbor } from "@helia/dag-cbor";
import { unixfs } from "@helia/unixfs";
import { serialiseSignDoc } from "sonr-cosmes/codec";
import { broadcastTx, RpcClient } from "sonr-cosmes/client";
import {
  DidV1MsgRegisterController,
  DwnV1QuerySpawnRequest,
} from "sonr-cosmes/protobufs";

export class Vault extends DurableObject {
  constructor(ctx, env) {
    super(ctx, env);
    this.state = ctx;
    this.env = env;

    // Initialize node state
    this.nodeState = {
      status: "offline",
      nodeId: null,
      discoveredPeers: new Map(),
      connectedPeers: new Map(),
      logs: [],
    };

    // Initialize Helia node and related components when the DO is created
    this.initializeHelia();
    this.initializePlugins();
  }

  // Helper method to add a log entry
  addLog(message) {
    const timestamp = new Date().toISOString();
    const logEntry = { timestamp, message };
    this.nodeState.logs.push(logEntry);

    // Keep only the last 100 log entries
    if (this.nodeState.logs.length > 100) {
      this.nodeState.logs.shift();
    }

    return logEntry;
  }

  async initializeHelia() {
    try {
      this.addLog("Initializing Helia node...");

      // Create a Helia node with memory blockstore and datastore
      this.helia = await createHelia({
        blockstore: { type: "memory" },
        datastore: { type: "memory" },
        // Configure libp2p for Cloudflare Workers environment
        libp2p: {
          start: true,
          addresses: { listen: [] },
          connectionManager: {
            minConnections: 0,
          },
        },
      });

      // Initialize various data handlers
      this.stringHandler = strings(this.helia);
      this.jsonHandler = json(this.helia);
      this.dagJsonHandler = dagJson(this.helia);
      this.dagCborHandler = dagCbor(this.helia);
      this.fsHandler = unixfs(this.helia);

      // Update node state
      this.nodeState.status = this.helia.libp2p.status;
      this.nodeState.nodeId = this.helia.libp2p.peerId.toString();

      // Set up event listeners
      this.helia.libp2p.addEventListener("peer:discovery", (evt) => {
        const peerId = evt.detail.id.toString();
        this.nodeState.discoveredPeers.set(peerId, {
          id: peerId,
          multiaddrs: evt.detail.multiaddrs
            ? evt.detail.multiaddrs.map((ma) => ma.toString())
            : [],
          discoveredAt: new Date().toISOString(),
        });
        this.addLog(`Discovered peer ${peerId}`);
      });

      this.helia.libp2p.addEventListener("peer:connect", (evt) => {
        const peerId = evt.detail.toString();
        this.nodeState.connectedPeers.set(peerId, {
          id: peerId,
          connectedAt: new Date().toISOString(),
        });
        this.addLog(`Connected to ${peerId}`);
      });

      this.helia.libp2p.addEventListener("peer:disconnect", (evt) => {
        const peerId = evt.detail.toString();
        this.nodeState.connectedPeers.delete(peerId);
        this.addLog(`Disconnected from ${peerId}`);
      });

      this.addLog("Helia node initialized successfully");
      return true;
    } catch (error) {
      this.addLog(`Failed to initialize Helia node: ${error.message}`);
      console.error("Failed to initialize Helia node:", error);
      return false;
    }
  }

  // initializePlugins initializes the enclave, signer, and verifier plugins
  async initializePlugins() {
    try {
      // 1. Initialize the enclave plugin
      this.addLog("Initializing enclave plugin...");
      this.enclavePlugin = await createPlugin(
        "https://cdn.sonr.io/bin/enclave.wasm",
        {
          useWasi: true,
        },
      );
      this.addLog("Enclave plugin initialized successfully");

      // 2. Initialize the signer plugin
      this.addLog("Initializing signer plugin...");
      this.signerPlugin = await createPlugin(
        "https://cdn.sonr.io/bin/signer.wasm",
        {
          useWasi: true,
        },
      );
      this.addLog("Signer plugin initialized successfully");

      // 3. Initialize the verifier plugin
      this.addLog("Initializing verifier plugin...");
      this.verifierPlugin = await createPlugin(
        "https://cdn.sonr.io/bin/verifier.wasm",
        {
          useWasi: true,
        },
      );
      this.addLog("Verifier plugin initialized successfully");
      return true;
    } catch (error) {
      this.addLog(`Failed to initialize plugin: ${error.message}`);
      console.error("Failed to initialize plugin:", error);
      return false;
    }
  }

  // RPC method to get node status
  async getNodeStatus() {
    if (!this.helia) {
      await this.initializeHelia();
    }

    return {
      status: this.nodeState.status,
      nodeId: this.nodeState.nodeId,
      discoveredPeersCount: this.nodeState.discoveredPeers.size,
      connectedPeersCount: this.nodeState.connectedPeers.size,
    };
  }

  // RPC method to get node ID
  async getNodeId() {
    if (!this.helia) {
      await this.initializeHelia();
    }

    return {
      nodeId: this.nodeState.nodeId,
    };
  }

  // RPC method to get discovered peers
  async getDiscoveredPeers() {
    if (!this.helia) {
      await this.initializeHelia();
    }

    return {
      count: this.nodeState.discoveredPeers.size,
      peers: Array.from(this.nodeState.discoveredPeers.values()),
    };
  }

  // RPC method to get connected peers
  async getConnectedPeers() {
    if (!this.helia) {
      await this.initializeHelia();
    }

    return {
      count: this.nodeState.connectedPeers.size,
      peers: Array.from(this.nodeState.connectedPeers.values()),
    };
  }

  // RPC method to get logs
  async getLogs(limit = 50) {
    if (!this.helia) {
      await this.initializeHelia();
    }

    const logs = [...this.nodeState.logs];
    return {
      count: logs.length,
      logs: logs.slice(-limit), // Return the most recent logs up to the limit
    };
  }

  // RPC method to sign data
  async sign(data) {
    // Use the signer plugin loaded in initializePlugins()
    if (!this.signerPlugin) {
      await this.initializePlugins();
    }
    try {
      let out = await this.signerPlugin.call("sign", JSON.stringify(data));
      return JSON.parse(out.text());
    } catch (error) {
      throw new Error(`Signing failed: ${error.message}`);
    }
  }

  // Creates and signs a transaction
  async createAndSignTx(msg, signer, options = {}) {
    this.addLog(`Creating transaction with message type: ${msg.typeUrl}`);

    try {
      // Default options
      const defaultOptions = {
        memo: "",
        fee: {
          amount: [{ denom: "usnr", amount: "1000" }],
          gas_limit: "200000",
        },
        chainId: this.env.SONR_CHAIN_ID || "sonr-testnet-1",
      };

      const txOptions = { ...defaultOptions, ...options };

      // Create the sign doc
      const signDoc = {
        chainId: txOptions.chainId,
        accountNumber: options.accountNumber || "0",
        sequence: options.sequence || "0",
        fee: txOptions.fee,
        msgs: [msg],
        memo: txOptions.memo,
      };

      // Serialize the sign doc
      const signBytes = serialiseSignDoc(signDoc);

      // Sign the transaction
      this.addLog(`Signing transaction for ${signer}`);
      const signature = await this.sign({
        bytes: Buffer.from(signBytes).toString("base64"),
        publicKey: options.publicKey,
      });

      // Create the signed transaction
      const signedTx = {
        signDoc,
        signature: {
          signature: signature.signature,
          pub_key: {
            type: "tendermint/PubKeySecp256k1",
            value: signature.publicKey,
          },
        },
      };

      this.addLog("Transaction created and signed successfully");
      return signedTx;
    } catch (error) {
      this.addLog(`Failed to create and sign transaction: ${error.message}`);
      throw new Error(`Transaction creation failed: ${error.message}`);
    }
  }

  // Broadcasts a signed transaction to the network
  async broadcastTransaction(signedTx, broadcastMode = "BROADCAST_MODE_SYNC") {
    this.addLog("Broadcasting transaction to network");

    try {
      const rpcUrl = this.env.SONR_RPC_URL || "https://rpc.sonr.io";
      this.addLog(`Using RPC URL: ${rpcUrl}`);

      const response = await broadcastTx(rpcUrl, signedTx, broadcastMode);

      if (response.tx_response && response.tx_response.code === 0) {
        this.addLog(
          `Transaction broadcast successful. Hash: ${response.tx_response.txhash}`,
        );
      } else {
        this.addLog(
          `Transaction broadcast failed: ${JSON.stringify(response.tx_response)}`,
        );
      }

      return response;
    } catch (error) {
      this.addLog(`Failed to broadcast transaction: ${error.message}`);
      throw new Error(`Transaction broadcast failed: ${error.message}`);
    }
  }

  // RPC method to add string content to IPFS
  async addString(content) {
    if (!this.helia) {
      await this.initializeHelia();
    }

    if (!this.helia) {
      throw new Error("Helia node not available");
    }

    try {
      const cid = await this.stringHandler.add(content);
      this.addLog(`Added string content with CID: ${cid.toString()}`);
      return { cid: cid.toString() };
    } catch (error) {
      this.addLog(`Failed to add string: ${error.message}`);
      throw new Error(`Failed to add string: ${error.message}`);
    }
  }

  // RPC method to add JSON content to IPFS
  async addJson(content) {
    if (!this.helia) {
      await this.initializeHelia();
    }

    if (!this.helia) {
      throw new Error("Helia node not available");
    }

    try {
      const cid = await this.jsonHandler.add(content);
      this.addLog(`Added JSON content with CID: ${cid.toString()}`);
      return { cid: cid.toString() };
    } catch (error) {
      this.addLog(`Failed to add JSON: ${error.message}`);
      throw new Error(`Failed to add JSON: ${error.message}`);
    }
  }

  // RPC method to get content from IPFS by CID
  async getContent(cid) {
    if (!this.helia) {
      await this.initializeHelia();
    }

    if (!this.helia) {
      throw new Error("Helia node not available");
    }

    try {
      this.addLog(`Retrieving content for CID: ${cid}`);
      // Try to get as JSON first
      try {
        const jsonData = await this.jsonHandler.get(cid);
        return { type: "json", content: jsonData };
      } catch (e) {
        // Fall back to string if JSON fails
        try {
          const stringData = await this.stringHandler.get(cid);
          return { type: "string", content: stringData };
        } catch (e2) {
          this.addLog(`Failed to retrieve content for CID: ${cid}`);
          throw new Error("Failed to retrieve content");
        }
      }
    } catch (error) {
      this.addLog(`Error getting content: ${error.message}`);
      throw new Error(`Failed to get content: ${error.message}`);
    }
  }

  // RPC method to add DAG-JSON content to IPFS
  async addDagJson(content) {
    if (!this.helia) {
      await this.initializeHelia();
    }

    if (!this.helia) {
      throw new Error("Helia node not available");
    }

    try {
      const cid = await this.dagJsonHandler.add(content);
      this.addLog(`Added DAG-JSON content with CID: ${cid.toString()}`);
      return { cid: cid.toString() };
    } catch (error) {
      this.addLog(`Failed to add DAG-JSON: ${error.message}`);
      throw new Error(`Failed to add DAG-JSON: ${error.message}`);
    }
  }

  // RPC method to add DAG-CBOR content to IPFS
  async addDagCbor(content) {
    if (!this.helia) {
      await this.initializeHelia();
    }

    if (!this.helia) {
      throw new Error("Helia node not available");
    }

    try {
      const cid = await this.dagCborHandler.add(content);
      this.addLog(`Added DAG-CBOR content with CID: ${cid.toString()}`);
      return { cid: cid.toString() };
    } catch (error) {
      this.addLog(`Failed to add DAG-CBOR: ${error.message}`);
      throw new Error(`Failed to add DAG-CBOR: ${error.message}`);
    }
  }

  async registerDidController(did, controller, signer, options = {}) {
    this.addLog(`Registering DID controller: ${controller} for DID: ${did}`);

    try {
      // Create the message
      const msg = DidV1MsgRegisterController.create({
        did: did,
        controller: controller,
      });

      // Create and sign transaction
      const signedTx = await this.createAndSignTx(msg, signer, options);

      // Broadcast transaction
      const response = await this.broadcastTransaction(signedTx);

      this.addLog(
        `DID controller registration response: ${JSON.stringify(response)}`,
      );
      return response;
    } catch (error) {
      this.addLog(`Failed to register DID controller: ${error.message}`);
      throw new Error(`DID controller registration failed: ${error.message}`);
    }
  }

  async spawnDwnVault(address, redirect, options = {}) {
    this.addLog(
      `Spawning DWN vault for address: ${address}, redirect: ${redirect}`,
    );

    try {
      // Use RPC client to make the query
      const rpcUrl =
        options.rpcUrl || this.env.SONR_RPC_URL || "https://rpc.sonr.io";
      this.addLog(`Using RPC URL: ${rpcUrl}`);

      return new Promise((resolve, reject) => {
        RpcClient.newBatchQuery(rpcUrl)
          .add(
            DwnV1QuerySpawnRequest,
            {
              cid: address,
              redirect: redirect,
            },
            (err, res) => {
              if (err) {
                this.addLog(`Spawn DWN vault failed: ${err.message}`);
                reject(err);
              } else {
                this.addLog(`Spawn DWN vault response: ${JSON.stringify(res)}`);
                resolve(res);
              }
            },
          )
          .send();
      });
    } catch (error) {
      this.addLog(`Failed to spawn DWN vault: ${error.message}`);
      throw new Error(`DWN vault spawn failed: ${error.message}`);
    }
  }

  // Retrieves account information needed for transaction signing
  async getAccountInfo(address) {
    this.addLog(`Getting account info for address: ${address}`);

    try {
      const rpcUrl = this.env.SONR_RPC_URL || "https://rpc.sonr.io";

      const client = new RpcClient(rpcUrl);
      const accountInfo = await client.getAccount(address);

      this.addLog(`Account info retrieved successfully`);
      return {
        accountNumber: accountInfo.account_number,
        sequence: accountInfo.sequence,
      };
    } catch (error) {
      this.addLog(`Failed to get account info: ${error.message}`);
      throw new Error(`Account info retrieval failed: ${error.message}`);
    }
  }

  // Fetch handler for backward compatibility and Go WASM server interaction
  async fetch(request) {
    const url = new URL(request.url);
    const path = url.pathname;
    const method = request.method;

    try {
      // Handle different API endpoints based on the path

      // Helia node status endpoints
      if (path === "/helia/status") {
        return new Response(JSON.stringify(await this.getNodeStatus()), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      if (path === "/helia/node-id") {
        return new Response(JSON.stringify(await this.getNodeId()), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      if (path === "/helia/discovered-peers") {
        return new Response(JSON.stringify(await this.getDiscoveredPeers()), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      if (path === "/helia/connected-peers") {
        return new Response(JSON.stringify(await this.getConnectedPeers()), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      if (path === "/helia/logs") {
        const limitParam = url.searchParams.get("limit");
        const limit = limitParam ? parseInt(limitParam, 10) : 50;
        return new Response(JSON.stringify(await this.getLogs(limit)), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      // IPFS content endpoints
      if (path === "/ipfs/add/string" && method === "POST") {
        const content = await request.text();
        return new Response(JSON.stringify(await this.addString(content)), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      if (path === "/ipfs/add/json" && method === "POST") {
        const content = await request.json();
        return new Response(JSON.stringify(await this.addJson(content)), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      if (path.startsWith("/ipfs/get/") && method === "GET") {
        const cid = path.replace("/ipfs/get/", "");
        const result = await this.getContent(cid);

        if (result.type === "json") {
          return new Response(JSON.stringify(result.content), {
            status: 200,
            headers: { "Content-Type": "application/json" },
          });
        } else {
          return new Response(result.content, {
            status: 200,
            headers: { "Content-Type": "text/plain" },
          });
        }
      }

      if (path === "/ipfs/dag/json" && method === "POST") {
        const content = await request.json();
        return new Response(JSON.stringify(await this.addDagJson(content)), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      if (path === "/ipfs/dag/cbor" && method === "POST") {
        const content = await request.json();
        return new Response(JSON.stringify(await this.addDagCbor(content)), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      // Signing endpoint
      if (path === "/vault/sign" && method === "POST") {
        const data = await request.json();
        return new Response(JSON.stringify(await this.sign(data)), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        });
      }

      // DID controller registration endpoint
      if (path === "/did/register-controller" && method === "POST") {
        const data = await request.json();
        const { did, controller, signer, options } = data;

        return new Response(
          JSON.stringify(
            await this.registerDidController(did, controller, signer, options),
          ),
          {
            status: 200,
            headers: { "Content-Type": "application/json" },
          },
        );
      }

      // DWN vault spawning endpoint
      if (path === "/dwn/spawn" && method === "POST") {
        const data = await request.json();
        const { address, redirect, options } = data;

        return new Response(
          JSON.stringify(await this.spawnDwnVault(address, redirect, options)),
          {
            status: 200,
            headers: { "Content-Type": "application/json" },
          },
        );
      }

      // Account info endpoint
      if (path === "/account/info" && method === "GET") {
        const address = url.searchParams.get("address");
        if (!address) {
          return new Response(
            JSON.stringify({ error: "Address parameter is required" }),
            {
              status: 400,
              headers: { "Content-Type": "application/json" },
            },
          );
        }

        return new Response(
          JSON.stringify(await this.getAccountInfo(address)),
          {
            status: 200,
            headers: { "Content-Type": "application/json" },
          },
        );
      }

      // Default response for unhandled paths
      return new Response(JSON.stringify({ error: "Not found" }), {
        status: 404,
        headers: { "Content-Type": "application/json" },
      });
    } catch (error) {
      // Log the error
      this.addLog(`Error handling request to ${path}: ${error.message}`);

      // Return error response
      return new Response(JSON.stringify({ error: error.message }), {
        status: 500,
        headers: { "Content-Type": "application/json" },
      });
    }
  }
}
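Note: since the fetch handler above exposes plain JSON endpoints, a client or another worker can call them over HTTP. A minimal illustrative sketch, assuming the did.run route from the wrangler.toml below and base64-encoded inputs (the function name and payload values are made up):

// Illustrative client call against the /vault/sign endpoint defined above.
async function signViaVault(signBytesBase64, publicKeyBase64) {
  const res = await fetch("https://did.run/vault/sign", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ bytes: signBytesBase64, publicKey: publicKeyBase64 }),
  });
  if (!res.ok) throw new Error(`sign failed: ${res.status}`);
  return res.json();
}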
53
cmd/vault/wrangler.toml
Normal file
@@ -0,0 +1,53 @@
# Top-level configuration
name = "motr-vault"
main = "./worker.mjs"
compatibility_date = "2025-04-14"

routes = [
  { pattern = "did.run", custom_domain = true },
]

[build]
command = "npm run build"

[dev]
port = 6969

[observability]
enabled = true
logpush = true

[[d1_databases]]
binding = "DB" # available in your Worker on env.DB
database_name = "motr-controller-db"
database_id = "872a4b08-7e07-4978-b227-5b60940238ed"

[[kv_namespaces]]
binding = "SESSIONS" # available in your Worker on env.SESSIONS
id = "ea5de66fcfc14b5eba170395e29432ee"

[[kv_namespaces]]
binding = "HANDLES" # available in your Worker on env.HANDLES
id = "271d47087a8842b2aac5ee79cf7bb203"

[[r2_buckets]]
binding = 'PROFILES'
bucket_name = 'profiles'

[vars]
SONR_CHAIN_ID = 'sonr-testnet-1'
IPFS_GATEWAY = 'https://ipfs.sonr.land'
SONR_API_URL = 'https://api.sonr.land'
SONR_RPC_URL = 'https://rpc.sonr.land'
SONR_GRPC_URL = 'https://grpc.sonr.land'
MATRIX_SERVER = 'https://bm.chat'
MOTR_GATEWAY = 'https://sonr.id'
MOTR_VAULT = 'https://did.run'
MOTR_MODE = 'controller'

[durable_objects]
bindings = [{name = "VAULT", class_name = "Vault"}]

[[migrations]]
tag = "v1" # Should be unique for each entry
new_classes = ["Vault"] # List the classes that should be created
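Note: because Vault extends DurableObject, the VAULT binding declared above can also be invoked via Workers RPC instead of the HTTP fetch handler. A minimal sketch, assuming a single named instance (the instance name and handler are illustrative, not part of this commit):

export default {
  async fetch(request, env) {
    // Route all requests for one logical vault to the same Durable Object instance.
    const id = env.VAULT.idFromName("default");
    const stub = env.VAULT.get(id);

    // RPC call to a method defined on the Vault class in worker.mjs above.
    const status = await stub.getNodeStatus();
    return Response.json(status);
  },
};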