From 2f8cd28ee77ebb9efb02256e118093f235db43bc Mon Sep 17 00:00:00 2001 From: Prad N Date: Sat, 7 Jun 2025 10:40:04 +0800 Subject: [PATCH] feat: implement account, profile, network, and activity management --- Makefile | 13 + config/config.go | 69 + config/routes.go | 28 + handlers/auth_handler.go | 40 + handlers/form_handler.go | 4 + handlers/page_handler.go | 24 + handlers/status_handler.go | 38 + internal/api/client.go | 35 + internal/api/market.go | 117 + internal/api/utils.go | 43 + internal/db/activity/db.go | 31 + internal/db/activity/models.go | 99 + internal/db/activity/querier.go | 63 + internal/db/activity/query.sql | 338 +++ internal/db/activity/query.sql.go | 1640 +++++++++++ internal/db/activity/schema.sql | 136 + internal/db/network/db.go | 31 + internal/db/network/models.go | 96 + internal/db/network/querier.go | 64 + internal/db/network/query.sql | 453 +++ internal/db/network/query.sql.go | 2583 +++++++++++++++++ internal/db/network/schema.sql | 139 + internal/db/sqlc.yaml | 34 + internal/db/users/db.go | 31 + internal/db/users/models.go | 68 + internal/db/users/querier.go | 53 + internal/db/users/query.sql | 234 ++ internal/db/users/query.sql.go | 1177 ++++++++ internal/db/users/schema.sql | 81 + internal/jobs/cron.go | 1 + internal/jobs/events.go | 12 + internal/jobs/tasks.go | 1 + internal/meta/metadata.templ | 97 + internal/meta/metadata_templ.go | 351 +++ ...4ad246d43f6c9251fb531fb640a87ba6029.sqlite | Bin 0 -> 4096 bytes ...46d43f6c9251fb531fb640a87ba6029.sqlite-shm | Bin 0 -> 32768 bytes ...46d43f6c9251fb531fb640a87ba6029.sqlite-wal | Bin 0 -> 251352 bytes ...6e7a8b420c5faa6382a5016d4645bcd0518.sqlite | Bin 0 -> 4096 bytes ...8b420c5faa6382a5016d4645bcd0518.sqlite-shm | Bin 0 -> 32768 bytes ...8b420c5faa6382a5016d4645bcd0518.sqlite-wal | Bin 0 -> 144232 bytes ...6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite | Bin 0 -> 4096 bytes ...10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm | Bin 0 -> 32768 bytes ...10ecb9c167bf5f298f50c3fca22f7bf.sqlite-wal | Bin 0 -> 156592 bytes .../migrations/001_accounts_table.down.sql | 1 + internal/migrations/001_accounts_table.up.sql | 30 + .../migrations/002_credentials_table.down.sql | 1 + .../migrations/002_credentials_table.up.sql | 19 + .../migrations/003_profiles_table.down.sql | 1 + internal/migrations/003_profiles_table.up.sql | 18 + internal/migrations/004_vaults_table.down.sql | 1 + internal/migrations/004_vaults_table.up.sql | 19 + internal/migrations/005_assets_table.down.sql | 1 + internal/migrations/005_assets_table.up.sql | 21 + internal/migrations/006_prices_table.down.sql | 1 + internal/migrations/006_prices_table.up.sql | 28 + .../007_price_conversions_table.down.sql | 1 + .../007_price_conversions_table.up.sql | 21 + .../migrations/008_blockchains_table.down.sql | 1 + .../migrations/008_blockchains_table.up.sql | 71 + .../migrations/009_services_table.down.sql | 1 + internal/migrations/009_services_table.up.sql | 24 + .../migrations/010_activities_table.down.sql | 1 + .../migrations/010_activities_table.up.sql | 32 + internal/migrations/011_health_table.down.sql | 1 + internal/migrations/011_health_table.up.sql | 28 + .../012_global_market_table.down.sql | 1 + .../migrations/012_global_market_table.up.sql | 19 + .../013_fear_greed_index_table.down.sql | 1 + .../013_fear_greed_index_table.up.sql | 15 + .../014_crypto_listings_table.down.sql | 1 + .../014_crypto_listings_table.up.sql | 18 + internal/migrations/Taskfile.yml | 200 ++ .../.cache/wrangler/wrangler-account.json | 6 + internal/migrations/node_modules/.mf/cf.json | 1 + 
internal/migrations/wrangler.toml | 64 + internal/ui/charts/area_chart.templ | 20 + internal/ui/charts/area_chart_templ.go | 75 + internal/ui/charts/bar_chart.templ | 21 + internal/ui/charts/bar_chart_templ.go | 76 + internal/ui/charts/candle_chart.templ | 106 + internal/ui/charts/candle_chart_templ.go | 137 + internal/ui/charts/line_chart.templ | 15 + internal/ui/charts/line_chart_templ.go | 70 + internal/ui/charts/pie_chart.templ | 22 + internal/ui/charts/pie_chart_templ.go | 77 + internal/ui/layout.templ | 113 + internal/ui/layout_templ.go | 633 ++++ internal/ui/providers.templ | 139 + internal/ui/providers_templ.go | 371 +++ internal/ui/styles.templ | 54 + internal/ui/styles_templ.go | 70 + main.go | 72 + ...4ad246d43f6c9251fb531fb640a87ba6029.sqlite | Bin 0 -> 4096 bytes ...46d43f6c9251fb531fb640a87ba6029.sqlite-shm | Bin 0 -> 32768 bytes ...46d43f6c9251fb531fb640a87ba6029.sqlite-wal | Bin 0 -> 251352 bytes ...6e7a8b420c5faa6382a5016d4645bcd0518.sqlite | Bin 0 -> 4096 bytes ...8b420c5faa6382a5016d4645bcd0518.sqlite-shm | Bin 0 -> 32768 bytes ...8b420c5faa6382a5016d4645bcd0518.sqlite-wal | Bin 0 -> 144232 bytes ...6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite | Bin 0 -> 4096 bytes ...10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm | Bin 0 -> 32768 bytes ...10ecb9c167bf5f298f50c3fca22f7bf.sqlite-wal | Bin 0 -> 156592 bytes migrations/001_accounts_table.down.sql | 1 + migrations/001_accounts_table.up.sql | 30 + migrations/002_credentials_table.down.sql | 1 + migrations/002_credentials_table.up.sql | 19 + migrations/003_profiles_table.down.sql | 1 + migrations/003_profiles_table.up.sql | 18 + migrations/004_vaults_table.down.sql | 1 + migrations/004_vaults_table.up.sql | 19 + migrations/005_assets_table.down.sql | 1 + migrations/005_assets_table.up.sql | 21 + migrations/006_prices_table.down.sql | 1 + migrations/006_prices_table.up.sql | 28 + .../007_price_conversions_table.down.sql | 1 + migrations/007_price_conversions_table.up.sql | 21 + migrations/008_blockchains_table.down.sql | 1 + migrations/008_blockchains_table.up.sql | 71 + migrations/009_services_table.down.sql | 1 + migrations/009_services_table.up.sql | 24 + migrations/010_activities_table.down.sql | 1 + migrations/010_activities_table.up.sql | 32 + migrations/011_health_table.down.sql | 1 + migrations/011_health_table.up.sql | 28 + migrations/012_global_market_table.down.sql | 1 + migrations/012_global_market_table.up.sql | 19 + .../013_fear_greed_index_table.down.sql | 1 + migrations/013_fear_greed_index_table.up.sql | 15 + migrations/014_crypto_listings_table.down.sql | 1 + migrations/014_crypto_listings_table.up.sql | 18 + migrations/Taskfile.yml | 200 ++ .../.cache/wrangler/wrangler-account.json | 6 + migrations/node_modules/.mf/cf.json | 1 + migrations/wrangler.toml | 64 + package.json | 15 + wrangler.toml | 59 + 135 files changed, 11665 insertions(+) create mode 100644 Makefile create mode 100644 config/config.go create mode 100644 config/routes.go create mode 100644 handlers/auth_handler.go create mode 100644 handlers/form_handler.go create mode 100644 handlers/page_handler.go create mode 100644 handlers/status_handler.go create mode 100644 internal/api/client.go create mode 100644 internal/api/market.go create mode 100644 internal/api/utils.go create mode 100644 internal/db/activity/db.go create mode 100644 internal/db/activity/models.go create mode 100644 internal/db/activity/querier.go create mode 100644 internal/db/activity/query.sql create mode 100644 internal/db/activity/query.sql.go create mode 100644 
internal/db/activity/schema.sql create mode 100644 internal/db/network/db.go create mode 100644 internal/db/network/models.go create mode 100644 internal/db/network/querier.go create mode 100644 internal/db/network/query.sql create mode 100644 internal/db/network/query.sql.go create mode 100644 internal/db/network/schema.sql create mode 100644 internal/db/sqlc.yaml create mode 100644 internal/db/users/db.go create mode 100644 internal/db/users/models.go create mode 100644 internal/db/users/querier.go create mode 100644 internal/db/users/query.sql create mode 100644 internal/db/users/query.sql.go create mode 100644 internal/db/users/schema.sql create mode 100644 internal/jobs/cron.go create mode 100644 internal/jobs/events.go create mode 100644 internal/jobs/tasks.go create mode 100644 internal/meta/metadata.templ create mode 100644 internal/meta/metadata_templ.go create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-shm create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-wal create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-shm create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-wal create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm create mode 100644 internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-wal create mode 100644 internal/migrations/001_accounts_table.down.sql create mode 100644 internal/migrations/001_accounts_table.up.sql create mode 100644 internal/migrations/002_credentials_table.down.sql create mode 100644 internal/migrations/002_credentials_table.up.sql create mode 100644 internal/migrations/003_profiles_table.down.sql create mode 100644 internal/migrations/003_profiles_table.up.sql create mode 100644 internal/migrations/004_vaults_table.down.sql create mode 100644 internal/migrations/004_vaults_table.up.sql create mode 100644 internal/migrations/005_assets_table.down.sql create mode 100644 internal/migrations/005_assets_table.up.sql create mode 100644 internal/migrations/006_prices_table.down.sql create mode 100644 internal/migrations/006_prices_table.up.sql create mode 100644 internal/migrations/007_price_conversions_table.down.sql create mode 100644 internal/migrations/007_price_conversions_table.up.sql create mode 100644 internal/migrations/008_blockchains_table.down.sql create mode 100644 internal/migrations/008_blockchains_table.up.sql create mode 100644 internal/migrations/009_services_table.down.sql create mode 100644 internal/migrations/009_services_table.up.sql create 
mode 100644 internal/migrations/010_activities_table.down.sql create mode 100644 internal/migrations/010_activities_table.up.sql create mode 100644 internal/migrations/011_health_table.down.sql create mode 100644 internal/migrations/011_health_table.up.sql create mode 100644 internal/migrations/012_global_market_table.down.sql create mode 100644 internal/migrations/012_global_market_table.up.sql create mode 100644 internal/migrations/013_fear_greed_index_table.down.sql create mode 100644 internal/migrations/013_fear_greed_index_table.up.sql create mode 100644 internal/migrations/014_crypto_listings_table.down.sql create mode 100644 internal/migrations/014_crypto_listings_table.up.sql create mode 100644 internal/migrations/Taskfile.yml create mode 100644 internal/migrations/node_modules/.cache/wrangler/wrangler-account.json create mode 100644 internal/migrations/node_modules/.mf/cf.json create mode 100644 internal/migrations/wrangler.toml create mode 100644 internal/ui/charts/area_chart.templ create mode 100644 internal/ui/charts/area_chart_templ.go create mode 100644 internal/ui/charts/bar_chart.templ create mode 100644 internal/ui/charts/bar_chart_templ.go create mode 100644 internal/ui/charts/candle_chart.templ create mode 100644 internal/ui/charts/candle_chart_templ.go create mode 100644 internal/ui/charts/line_chart.templ create mode 100644 internal/ui/charts/line_chart_templ.go create mode 100644 internal/ui/charts/pie_chart.templ create mode 100644 internal/ui/charts/pie_chart_templ.go create mode 100644 internal/ui/layout.templ create mode 100644 internal/ui/layout_templ.go create mode 100644 internal/ui/providers.templ create mode 100644 internal/ui/providers_templ.go create mode 100644 internal/ui/styles.templ create mode 100644 internal/ui/styles_templ.go create mode 100644 main.go create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-shm create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-wal create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-shm create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-wal create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm create mode 100644 migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-wal create mode 100644 migrations/001_accounts_table.down.sql create mode 100644 migrations/001_accounts_table.up.sql create mode 100644 migrations/002_credentials_table.down.sql create mode 100644 migrations/002_credentials_table.up.sql create mode 100644 migrations/003_profiles_table.down.sql create mode 100644 migrations/003_profiles_table.up.sql 
create mode 100644 migrations/004_vaults_table.down.sql create mode 100644 migrations/004_vaults_table.up.sql create mode 100644 migrations/005_assets_table.down.sql create mode 100644 migrations/005_assets_table.up.sql create mode 100644 migrations/006_prices_table.down.sql create mode 100644 migrations/006_prices_table.up.sql create mode 100644 migrations/007_price_conversions_table.down.sql create mode 100644 migrations/007_price_conversions_table.up.sql create mode 100644 migrations/008_blockchains_table.down.sql create mode 100644 migrations/008_blockchains_table.up.sql create mode 100644 migrations/009_services_table.down.sql create mode 100644 migrations/009_services_table.up.sql create mode 100644 migrations/010_activities_table.down.sql create mode 100644 migrations/010_activities_table.up.sql create mode 100644 migrations/011_health_table.down.sql create mode 100644 migrations/011_health_table.up.sql create mode 100644 migrations/012_global_market_table.down.sql create mode 100644 migrations/012_global_market_table.up.sql create mode 100644 migrations/013_fear_greed_index_table.down.sql create mode 100644 migrations/013_fear_greed_index_table.up.sql create mode 100644 migrations/014_crypto_listings_table.down.sql create mode 100644 migrations/014_crypto_listings_table.up.sql create mode 100644 migrations/Taskfile.yml create mode 100644 migrations/node_modules/.cache/wrangler/wrangler-account.json create mode 100644 migrations/node_modules/.mf/cf.json create mode 100644 migrations/wrangler.toml create mode 100644 package.json create mode 100644 wrangler.toml diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..e405a68 --- /dev/null +++ b/Makefile @@ -0,0 +1,13 @@ +generate: + @gum spin --show-error --title "(1/3) Running templ generate" -- sh -c "templ generate" + @gum log --level info --time kitchen "[GENERATE] Completed templ generate successfully." + +assets: + @gum spin --show-error --title "(2/3) Running workers-assets-gen" -- sh -c "go run github.com/syumai/workers/cmd/workers-assets-gen -mode=go" + @gum log --level info --time kitchen "[GENERATE] Completed workers-assets-gen successfully." + +build: generate assets + @GOOS=js GOARCH=wasm go build -ldflags="-s -w" -o ./build/app.wasm . + @gum log --level info --time kitchen "[BUILD] Completed Go WASM Build successfully." 
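Note on the build target: it cross-compiles the app to ./build/app.wasm for the Cloudflare Workers runtime via github.com/syumai/workers. The main.go that this patch adds is not shown in this hunk, so the snippet below is only a sketch of the typical wiring, assuming the entrypoint hands the Echo router (populated by the RegisterViews/RegisterPartials helpers from config/routes.go later in this patch) to workers.Serve; the import path for the config package is inferred from the module name used elsewhere in the patch.

//go:build js && wasm

package main

import (
	"github.com/labstack/echo/v4"
	"github.com/sonr-io/motr/config" // assumed import path, based on the module name seen in other files of this patch
	"github.com/syumai/workers"
)

func main() {
	e := echo.New()
	config.RegisterViews(e)    // GET /, /login, /register
	config.RegisterPartials(e) // POST login/register steps and /status
	workers.Serve(e)           // *echo.Echo satisfies http.Handler, so the Workers runtime can serve it directly
}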
+ + diff --git a/config/config.go b/config/config.go new file mode 100644 index 0000000..b3840c4 --- /dev/null +++ b/config/config.go @@ -0,0 +1,69 @@ +//go:build js && wasm +// +build js,wasm + +package config + +import ( + "time" + + "github.com/syumai/workers/cloudflare" +) + +type Config struct { + Cache CacheSettings `json:"cache"` // Added Cache configuration + Sonr NetworkParams `json:"network"` + DefaultExpiry time.Duration `json:"expiry"` +} + +type NetworkParams struct { + SonrChainID string `json:"sonr_chain_id"` + SonrAPIURL string `json:"sonr_api_url"` + SonrRPCURL string `json:"sonr_rpc_url"` + IPFSGateway string `json:"ipfs_gateway"` +} + +// CacheSettings defines the configuration for Cloudflare cache +type CacheSettings struct { + Enabled bool `json:"enabled"` + DefaultMaxAge int `json:"default_max_age"` + BypassHeader string `json:"bypass_header"` + BypassValue string `json:"bypass_value"` + CacheableStatusCodes []int `json:"cacheable_status_codes"` + CacheableContentTypes []string `json:"cacheable_content_types"` +} + +func Get() Config { + cache := CacheSettings{ + Enabled: true, + DefaultMaxAge: 60, // 1 minute by default + BypassHeader: "X-Cache-Bypass", + BypassValue: "true", + CacheableStatusCodes: []int{ + 200, 301, 302, + }, + CacheableContentTypes: []string{ + "text/html", + "text/css", + "text/javascript", + "application/javascript", + "application/json", + "image/jpeg", + "image/png", + "image/gif", + "image/webp", + }, + } + + sonr := NetworkParams{ + SonrChainID: cloudflare.Getenv("SONR_CHAIN_ID"), + SonrAPIURL: cloudflare.Getenv("SONR_API_URL"), + SonrRPCURL: cloudflare.Getenv("SONR_RPC_URL"), + IPFSGateway: cloudflare.Getenv("IPFS_GATEWAY"), + } + c := Config{ + Sonr: sonr, + Cache: cache, + DefaultExpiry: time.Hour * 1, + } + return c +} diff --git a/config/routes.go b/config/routes.go new file mode 100644 index 0000000..526cd79 --- /dev/null +++ b/config/routes.go @@ -0,0 +1,28 @@ +//go:build js && wasm +// +build js,wasm + +package config + +import ( + "github.com/labstack/echo/v4" + "github.com/sonr-io/motr/handlers" +) + +// ╭────────────────────────────────────────────────╮ +// │ HTTP Routes │ +// ╰────────────────────────────────────────────────╯ + +func RegisterViews(e *echo.Echo) { + e.GET("/", handlers.RenderHomePage) + e.GET("/login", handlers.RenderLoginPage) + e.GET("/register", handlers.RenderRegisterPage) +} + +func RegisterPartials(e *echo.Echo) { + e.POST("/login/:handle/check", handlers.HandleLoginCheck) + e.POST("/login/:handle/finish", handlers.HandleLoginFinish) + e.POST("/register/:handle", handlers.HandleRegisterStart) + e.POST("/register/:handle/check", handlers.HandleRegisterCheck) + e.POST("/register/:handle/finish", handlers.HandleRegisterFinish) + e.POST("/status", handlers.HandleStatusCheck) +} diff --git a/handlers/auth_handler.go b/handlers/auth_handler.go new file mode 100644 index 0000000..2e19da6 --- /dev/null +++ b/handlers/auth_handler.go @@ -0,0 +1,40 @@ +package handlers + +import ( + "github.com/labstack/echo/v4" + "github.com/sonr-io/motr/pkg/render" + "github.com/sonr-io/motr/ui/login" + "github.com/sonr-io/motr/ui/register" +) + +func HandleLoginCheck(c echo.Context) error { + return render.Component(c, login.LoginView()) +} + +func HandleLoginInitial(c echo.Context) error { + return render.Component(c, login.LoginView()) +} + +func HandleLoginFinish(c echo.Context) error { + return render.Component(c, login.LoginView()) +} + +func HandleLoginStart(c echo.Context) error { + return render.Component(c, 
login.LoginView()) +} + +func HandleRegisterInitial(c echo.Context) error { + return render.Component(c, register.RegisterView()) +} + +func HandleRegisterCheck(c echo.Context) error { + return render.Component(c, register.RegisterView()) +} + +func HandleRegisterFinish(c echo.Context) error { + return render.Component(c, register.RegisterView()) +} + +func HandleRegisterStart(c echo.Context) error { + return render.Component(c, register.RegisterView()) +} diff --git a/handlers/form_handler.go b/handlers/form_handler.go new file mode 100644 index 0000000..9059290 --- /dev/null +++ b/handlers/form_handler.go @@ -0,0 +1,4 @@ +//go:build js && wasm +// +build js,wasm + +package handlers diff --git a/handlers/page_handler.go b/handlers/page_handler.go new file mode 100644 index 0000000..1830285 --- /dev/null +++ b/handlers/page_handler.go @@ -0,0 +1,24 @@ +//go:build js && wasm +// +build js,wasm + +package handlers + +import ( + "github.com/labstack/echo/v4" + "github.com/sonr-io/motr/ui/register" + "github.com/sonr-io/motr/pkg/render" + "github.com/sonr-io/motr/ui/home" + "github.com/sonr-io/motr/ui/login" +) + +func RenderHomePage(c echo.Context) error { + return render.Component(c, home.HomeView()) +} + +func RenderLoginPage(c echo.Context) error { + return render.Component(c, login.LoginView()) +} + +func RenderRegisterPage(c echo.Context) error { + return render.Component(c, register.RegisterView()) +} diff --git a/handlers/status_handler.go b/handlers/status_handler.go new file mode 100644 index 0000000..b5eb84a --- /dev/null +++ b/handlers/status_handler.go @@ -0,0 +1,38 @@ +package handlers + +import ( + "github.com/labstack/echo/v4" +) + +// StatusCheck is a struct that represents the status of the application +type StatusCheck struct { + Ok bool `json:"ok"` + Services []struct { + Name string `json:"name"` + Ok bool `json:"ok"` + } `json:"services"` +} + +// HandleStatusCheck is a handler that checks the status of the application +func HandleStatusCheck(c echo.Context) error { + return c.JSON(200, StatusCheck{ + Ok: true, + Services: []struct { + Name string `json:"name"` + Ok bool `json:"ok"` + }{ + { + Name: "IPFS", + Ok: true, + }, + { + Name: "IBC", + Ok: true, + }, + { + Name: "Sonr", + Ok: true, + }, + }, + }) +} diff --git a/internal/api/client.go b/internal/api/client.go new file mode 100644 index 0000000..8d7fa8f --- /dev/null +++ b/internal/api/client.go @@ -0,0 +1,35 @@ +//go:build js && wasm +// +build js,wasm + +package api + +import ( + "context" + + "github.com/syumai/workers/cloudflare/fetch" +) + +type Response interface { + UnmarshalJSON(data []byte) error +} + +type Client interface { + MarketAPI +} + +type client struct { + fc *fetch.Client + ctx context.Context + MarketAPI +} + +func NewClient(ctx context.Context) *client { + fc := fetch.NewClient() + c := &client{ + fc: fc, + ctx: ctx, + } + marketAPI := NewMarketAPI(c, ctx) + c.MarketAPI = marketAPI + return c +} diff --git a/internal/api/market.go b/internal/api/market.go new file mode 100644 index 0000000..14c182c --- /dev/null +++ b/internal/api/market.go @@ -0,0 +1,117 @@ +//go:build js && wasm +// +build js,wasm + +package api + +import ( + "context" + "encoding/json" + "fmt" +) + +const ( + kCryptoAPIURL = "https://api.alternative.me" + kCryptoAPIListings = "/v2/listings" + kCryptoAPITickers = "/v2/ticker" + kCryptoAPIGlobal = "/v2/global" +) + +type MarketAPI interface { + Listings(symbol string) (*ListingsResponse, error) + Ticker(symbol string) (*TickersResponse, error) + GlobalMarket() 
(*GlobalMarketResponse, error) +} + +type marketAPI struct { + client *client + ctx context.Context +} + +func NewMarketAPI(c *client, ctx context.Context) *marketAPI { + return &marketAPI{ + client: c, + ctx: ctx, + } +} + +func (m *marketAPI) Listings(symbol string) (*ListingsResponse, error) { + r := buildRequest(m.ctx, fmt.Sprintf("%s%s/%s", kCryptoAPIURL, kCryptoAPIListings, symbol)) + v := &ListingsResponse{} + err := doFetch(m.client.fc, r, v) + if err != nil { + return nil, err + } + return v, nil +} + +func (m *marketAPI) Ticker(symbol string) (*TickersResponse, error) { + r := buildRequest(m.ctx, fmt.Sprintf("%s%s/%s", kCryptoAPIURL, kCryptoAPITickers, symbol)) + v := &TickersResponse{} + err := doFetch(m.client.fc, r, v) + if err != nil { + return nil, err + } + return v, nil +} + +func (m *marketAPI) GlobalMarket() (*GlobalMarketResponse, error) { + r := buildRequest(m.ctx, kCryptoAPIURL+kCryptoAPIGlobal) + v := &GlobalMarketResponse{} + err := doFetch(m.client.fc, r, v) + if err != nil { + return nil, err + } + return v, nil +} + +type ListingsResponse struct { + Data []struct { + ID string `json:"id"` + Name string `json:"name"` + Symbol string `json:"symbol"` + WebsiteSlug string `json:"website_slug"` + } `json:"data"` + Metadata struct { + Timestamp int `json:"timestamp"` + NumCryptocurrencies int `json:"num_cryptocurrencies"` + Error any `json:"error"` + } `json:"metadata"` +} + +func (r *ListingsResponse) UnmarshalJSON(data []byte) error { + type plain ListingsResponse // local alias drops this method so json.Unmarshal does not recurse infinitely + return json.Unmarshal(data, (*plain)(r)) +} + +type TickersResponse struct { + Data []struct { + Symbol string `json:"symbol"` + Price struct { + USD float64 `json:"USD"` + } `json:"price"` + } `json:"data"` + Metadata struct { + Timestamp int `json:"timestamp"` + Error any `json:"error"` + } `json:"metadata"` +} + +func (r *TickersResponse) UnmarshalJSON(data []byte) error { + type plain TickersResponse + return json.Unmarshal(data, (*plain)(r)) +} + +type GlobalMarketResponse struct { + Data []struct { + Symbol string `json:"symbol"` + Price struct { + USD float64 `json:"USD"` + } `json:"price"` + } `json:"data"` + Metadata struct { + Timestamp int `json:"timestamp"` + Error any `json:"error"` + } `json:"metadata"` +} + +func (r *GlobalMarketResponse) UnmarshalJSON(data []byte) error { + type plain GlobalMarketResponse + return json.Unmarshal(data, (*plain)(r)) +} diff --git a/internal/api/utils.go b/internal/api/utils.go new file mode 100644 index 0000000..088b003 --- /dev/null +++ b/internal/api/utils.go @@ -0,0 +1,43 @@ +//go:build js && wasm +// +build js,wasm + +package api + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "github.com/syumai/workers/cloudflare/fetch" +) + +func buildRequest(c context.Context, url string) *fetch.Request { + r, err := fetch.NewRequest(c, http.MethodGet, url, nil) + if err != nil { + fmt.Println(err) + return nil + } + r.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:109.0) Gecko/20100101 Firefox/111.0") + return r +} + +func doFetch(c *fetch.Client, r *fetch.Request, v Response) error { + resp, err := c.Do(r, nil) + if err != nil { + return fmt.Errorf("request failed: %w", err) + } + defer resp.Body.Close() // Ensure body is always closed + + // Check for non-2xx status codes + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return fmt.Errorf("unexpected status code: %d", resp.StatusCode) + } + + // Directly decode JSON into the response struct + if err := json.NewDecoder(resp.Body).Decode(v); err != nil { + return fmt.Errorf("failed to decode response: %w", err) + } + + return nil +} diff --git a/internal/db/activity/db.go b/internal/db/activity/db.go new file mode 100644 index 
0000000..3562a94 --- /dev/null +++ b/internal/db/activity/db.go @@ -0,0 +1,31 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 + +package activity + +import ( + "context" + "database/sql" +) + +type DBTX interface { + ExecContext(context.Context, string, ...interface{}) (sql.Result, error) + PrepareContext(context.Context, string) (*sql.Stmt, error) + QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) + QueryRowContext(context.Context, string, ...interface{}) *sql.Row +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx *sql.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/internal/db/activity/models.go b/internal/db/activity/models.go new file mode 100644 index 0000000..7e6f06d --- /dev/null +++ b/internal/db/activity/models.go @@ -0,0 +1,99 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 + +package activity + +import ( + "database/sql" + "time" +) + +type Activity struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + AccountID string `json:"account_id"` + TxHash sql.NullString `json:"tx_hash"` + TxType string `json:"tx_type"` + Status string `json:"status"` + Amount sql.NullString `json:"amount"` + Fee sql.NullString `json:"fee"` + GasUsed sql.NullInt64 `json:"gas_used"` + GasWanted sql.NullInt64 `json:"gas_wanted"` + Memo sql.NullString `json:"memo"` + BlockHeight sql.NullInt64 `json:"block_height"` + Timestamp time.Time `json:"timestamp"` + RawLog sql.NullString `json:"raw_log"` + Error sql.NullString `json:"error"` +} + +type CryptoListing struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + ApiID string `json:"api_id"` + Name string `json:"name"` + Symbol string `json:"symbol"` + WebsiteSlug string `json:"website_slug"` +} + +type FearGreedIndex struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Value int64 `json:"value"` + ValueClassification string `json:"value_classification"` + Timestamp time.Time `json:"timestamp"` + TimeUntilUpdate sql.NullString `json:"time_until_update"` +} + +type GlobalMarket struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + TotalMarketCapUsd sql.NullFloat64 `json:"total_market_cap_usd"` + Total24hVolumeUsd sql.NullFloat64 `json:"total_24h_volume_usd"` + BitcoinPercentageOfMarketCap sql.NullFloat64 `json:"bitcoin_percentage_of_market_cap"` + ActiveCurrencies sql.NullInt64 `json:"active_currencies"` + ActiveAssets sql.NullInt64 `json:"active_assets"` + ActiveMarkets sql.NullInt64 `json:"active_markets"` + LastUpdated time.Time `json:"last_updated"` +} + +type Health struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + EndpointUrl string `json:"endpoint_url"` + EndpointType string `json:"endpoint_type"` + ChainID sql.NullString `json:"chain_id"` + Status string `json:"status"` + ResponseTimeMs sql.NullInt64 `json:"response_time_ms"` + LastChecked time.Time `json:"last_checked"` + NextCheck sql.NullTime `json:"next_check"` + FailureCount int64 
`json:"failure_count"` + SuccessCount int64 `json:"success_count"` + ResponseData sql.NullString `json:"response_data"` + ErrorMessage sql.NullString `json:"error_message"` +} + +type Service struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Name string `json:"name"` + Description sql.NullString `json:"description"` + ChainID string `json:"chain_id"` + Address string `json:"address"` + OwnerAddress string `json:"owner_address"` + Metadata sql.NullString `json:"metadata"` + Status string `json:"status"` + BlockHeight int64 `json:"block_height"` +} diff --git a/internal/db/activity/querier.go b/internal/db/activity/querier.go new file mode 100644 index 0000000..c281773 --- /dev/null +++ b/internal/db/activity/querier.go @@ -0,0 +1,63 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 + +package activity + +import ( + "context" + "database/sql" +) + +type Querier interface { + GetActivityByID(ctx context.Context, id string) (Activity, error) + GetActivityByTxHash(ctx context.Context, txHash sql.NullString) (Activity, error) + GetCryptoListingByApiID(ctx context.Context, apiID string) (CryptoListing, error) + GetCryptoListingByID(ctx context.Context, id string) (CryptoListing, error) + GetCryptoListingBySymbol(ctx context.Context, symbol string) (CryptoListing, error) + GetCryptoListingByWebsiteSlug(ctx context.Context, websiteSlug string) (CryptoListing, error) + GetFearGreedIndexByID(ctx context.Context, id string) (FearGreedIndex, error) + GetGlobalMarketByID(ctx context.Context, id string) (GlobalMarket, error) + GetHealthByEndpoint(ctx context.Context, endpointUrl string) (Health, error) + GetHealthByID(ctx context.Context, id string) (Health, error) + GetLatestFearGreedIndex(ctx context.Context) (FearGreedIndex, error) + GetLatestGlobalMarket(ctx context.Context) (GlobalMarket, error) + GetServiceByAddress(ctx context.Context, address string) (Service, error) + GetServiceByChainAndAddress(ctx context.Context, arg GetServiceByChainAndAddressParams) (Service, error) + GetServiceByID(ctx context.Context, id string) (Service, error) + // ACTIVITY QUERIES + InsertActivity(ctx context.Context, arg InsertActivityParams) (Activity, error) + // CRYPTO LISTINGS QUERIES (NEW) + InsertCryptoListing(ctx context.Context, arg InsertCryptoListingParams) (CryptoListing, error) + // FEAR AND GREED INDEX QUERIES (NEW) + InsertFearGreedIndex(ctx context.Context, arg InsertFearGreedIndexParams) (FearGreedIndex, error) + InsertGlobalMarket(ctx context.Context, arg InsertGlobalMarketParams) (GlobalMarket, error) + // HEALTH QUERIES + InsertHealth(ctx context.Context, arg InsertHealthParams) (Health, error) + InsertService(ctx context.Context, arg InsertServiceParams) (Service, error) + ListActivitiesByAccount(ctx context.Context, arg ListActivitiesByAccountParams) ([]Activity, error) + ListActivitiesByStatus(ctx context.Context, arg ListActivitiesByStatusParams) ([]Activity, error) + ListActivitiesByType(ctx context.Context, arg ListActivitiesByTypeParams) ([]Activity, error) + ListCryptoListings(ctx context.Context, arg ListCryptoListingsParams) ([]CryptoListing, error) + ListFearGreedIndexHistory(ctx context.Context, arg ListFearGreedIndexHistoryParams) ([]FearGreedIndex, error) + ListGlobalMarketHistory(ctx context.Context, arg ListGlobalMarketHistoryParams) ([]GlobalMarket, error) + ListHealthByChain(ctx context.Context, arg ListHealthByChainParams) ([]Health, error) + 
ListHealthByStatus(ctx context.Context, arg ListHealthByStatusParams) ([]Health, error) + ListHealthChecksNeedingUpdate(ctx context.Context, limit int64) ([]Health, error) + ListServicesByChain(ctx context.Context, arg ListServicesByChainParams) ([]Service, error) + ListServicesByOwner(ctx context.Context, arg ListServicesByOwnerParams) ([]Service, error) + SoftDeleteActivity(ctx context.Context, id string) error + SoftDeleteCryptoListing(ctx context.Context, id string) error + SoftDeleteFearGreedIndex(ctx context.Context, id string) error + SoftDeleteGlobalMarket(ctx context.Context, id string) error + SoftDeleteHealth(ctx context.Context, id string) error + SoftDeleteService(ctx context.Context, id string) error + UpdateActivityStatus(ctx context.Context, arg UpdateActivityStatusParams) (Activity, error) + UpdateCryptoListing(ctx context.Context, arg UpdateCryptoListingParams) (CryptoListing, error) + UpdateFearGreedIndex(ctx context.Context, arg UpdateFearGreedIndexParams) (FearGreedIndex, error) + UpdateGlobalMarket(ctx context.Context, arg UpdateGlobalMarketParams) (GlobalMarket, error) + UpdateHealthCheck(ctx context.Context, arg UpdateHealthCheckParams) (Health, error) + UpdateService(ctx context.Context, arg UpdateServiceParams) (Service, error) +} + +var _ Querier = (*Queries)(nil) diff --git a/internal/db/activity/query.sql b/internal/db/activity/query.sql new file mode 100644 index 0000000..cff4428 --- /dev/null +++ b/internal/db/activity/query.sql @@ -0,0 +1,338 @@ +-- name: InsertService :one +INSERT INTO services ( + name, + description, + chain_id, + address, + owner_address, + metadata, + status, + block_height +) VALUES (?, ?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetServiceByID :one +SELECT * FROM services +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetServiceByAddress :one +SELECT * FROM services +WHERE address = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetServiceByChainAndAddress :one +SELECT * FROM services +WHERE chain_id = ? AND address = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: ListServicesByChain :many +SELECT * FROM services +WHERE chain_id = ? AND deleted_at IS NULL +ORDER BY name ASC +LIMIT ? OFFSET ?; + +-- name: ListServicesByOwner :many +SELECT * FROM services +WHERE owner_address = ? AND deleted_at IS NULL +ORDER BY created_at DESC +LIMIT ? OFFSET ?; + +-- name: UpdateService :one +UPDATE services +SET + name = ?, + description = ?, + owner_address = ?, + metadata = ?, + status = ?, + block_height = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteService :exec +UPDATE services +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- ACTIVITY QUERIES +-- name: InsertActivity :one +INSERT INTO activities ( + account_id, + tx_hash, + tx_type, + status, + amount, + fee, + gas_used, + gas_wanted, + memo, + block_height, + timestamp, + raw_log, + error +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetActivityByID :one +SELECT * FROM activities +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetActivityByTxHash :one +SELECT * FROM activities +WHERE tx_hash = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: ListActivitiesByAccount :many +SELECT * FROM activities +WHERE account_id = ? AND deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? OFFSET ?; + +-- name: ListActivitiesByType :many +SELECT * FROM activities +WHERE tx_type = ? AND deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? 
OFFSET ?; + +-- name: ListActivitiesByStatus :many +SELECT * FROM activities +WHERE status = ? AND deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? OFFSET ?; + +-- name: UpdateActivityStatus :one +UPDATE activities +SET + status = ?, + tx_hash = ?, + block_height = ?, + gas_used = ?, + raw_log = ?, + error = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteActivity :exec +UPDATE activities +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- HEALTH QUERIES +-- name: InsertHealth :one +INSERT INTO health ( + endpoint_url, + endpoint_type, + chain_id, + status, + response_time_ms, + last_checked, + next_check, + failure_count, + success_count, + response_data, + error_message +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetHealthByID :one +SELECT * FROM health +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetHealthByEndpoint :one +SELECT * FROM health +WHERE endpoint_url = ? AND deleted_at IS NULL +ORDER BY last_checked DESC +LIMIT 1; + +-- name: ListHealthByChain :many +SELECT * FROM health +WHERE chain_id = ? AND deleted_at IS NULL +ORDER BY last_checked DESC +LIMIT ? OFFSET ?; + +-- name: ListHealthByStatus :many +SELECT * FROM health +WHERE status = ? AND deleted_at IS NULL +ORDER BY last_checked DESC +LIMIT ? OFFSET ?; + +-- name: ListHealthChecksNeedingUpdate :many +SELECT * FROM health +WHERE next_check <= CURRENT_TIMESTAMP AND deleted_at IS NULL +ORDER BY next_check ASC +LIMIT ?; + +-- name: UpdateHealthCheck :one +UPDATE health +SET + status = ?, + response_time_ms = ?, + last_checked = CURRENT_TIMESTAMP, + next_check = ?, + failure_count = CASE WHEN status = 'failed' THEN failure_count + 1 ELSE failure_count END, + success_count = CASE WHEN status = 'success' THEN success_count + 1 ELSE success_count END, + response_data = ?, + error_message = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteHealth :exec +UPDATE health +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + + +-- name: GetGlobalMarketByID :one +SELECT * FROM global_market +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetLatestGlobalMarket :one +SELECT * FROM global_market +WHERE deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT 1; + +-- name: ListGlobalMarketHistory :many +SELECT * FROM global_market +WHERE deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT ? OFFSET ?; + +-- name: UpdateGlobalMarket :one +UPDATE global_market +SET + total_market_cap_usd = ?, + total_24h_volume_usd = ?, + bitcoin_percentage_of_market_cap = ?, + active_currencies = ?, + active_assets = ?, + active_markets = ?, + last_updated = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteGlobalMarket :exec +UPDATE global_market +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- FEAR AND GREED INDEX QUERIES (NEW) +-- name: InsertFearGreedIndex :one +INSERT INTO fear_greed_index ( + value, + value_classification, + timestamp, + time_until_update +) VALUES (?, ?, ?, ?) +RETURNING *; + +-- name: GetFearGreedIndexByID :one +SELECT * FROM fear_greed_index +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetLatestFearGreedIndex :one +SELECT * FROM fear_greed_index +WHERE deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT 1; + +-- name: ListFearGreedIndexHistory :many +SELECT * FROM fear_greed_index +WHERE deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? 
OFFSET ?; + +-- name: UpdateFearGreedIndex :one +UPDATE fear_greed_index +SET + value = ?, + value_classification = ?, + timestamp = ?, + time_until_update = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: InsertGlobalMarket :one +INSERT INTO global_market ( + total_market_cap_usd, + total_24h_volume_usd, + bitcoin_percentage_of_market_cap, + active_currencies, + active_assets, + active_markets, + last_updated +) VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING *; + + +-- name: SoftDeleteFearGreedIndex :exec +UPDATE fear_greed_index +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- CRYPTO LISTINGS QUERIES (NEW) +-- name: InsertCryptoListing :one +INSERT INTO crypto_listings ( + api_id, + name, + symbol, + website_slug +) VALUES (?, ?, ?, ?) +RETURNING *; + +-- name: GetCryptoListingByID :one +SELECT * FROM crypto_listings +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetCryptoListingByApiID :one +SELECT * FROM crypto_listings +WHERE api_id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetCryptoListingBySymbol :one +SELECT * FROM crypto_listings +WHERE symbol = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetCryptoListingByWebsiteSlug :one +SELECT * FROM crypto_listings +WHERE website_slug = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: ListCryptoListings :many +SELECT * FROM crypto_listings +WHERE deleted_at IS NULL +ORDER BY name ASC +LIMIT ? OFFSET ?; + +-- name: UpdateCryptoListing :one +UPDATE crypto_listings +SET + name = ?, + symbol = ?, + website_slug = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteCryptoListing :exec +UPDATE crypto_listings +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; diff --git a/internal/db/activity/query.sql.go b/internal/db/activity/query.sql.go new file mode 100644 index 0000000..dd11bc3 --- /dev/null +++ b/internal/db/activity/query.sql.go @@ -0,0 +1,1640 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 +// source: query.sql + +package activity + +import ( + "context" + "database/sql" + "time" +) + +const getActivityByID = `-- name: GetActivityByID :one +SELECT id, created_at, updated_at, deleted_at, account_id, tx_hash, tx_type, status, amount, fee, gas_used, gas_wanted, memo, block_height, timestamp, raw_log, error FROM activities +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetActivityByID(ctx context.Context, id string) (Activity, error) { + row := q.db.QueryRowContext(ctx, getActivityByID, id) + var i Activity + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AccountID, + &i.TxHash, + &i.TxType, + &i.Status, + &i.Amount, + &i.Fee, + &i.GasUsed, + &i.GasWanted, + &i.Memo, + &i.BlockHeight, + &i.Timestamp, + &i.RawLog, + &i.Error, + ) + return i, err +} + +const getActivityByTxHash = `-- name: GetActivityByTxHash :one +SELECT id, created_at, updated_at, deleted_at, account_id, tx_hash, tx_type, status, amount, fee, gas_used, gas_wanted, memo, block_height, timestamp, raw_log, error FROM activities +WHERE tx_hash = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetActivityByTxHash(ctx context.Context, txHash sql.NullString) (Activity, error) { + row := q.db.QueryRowContext(ctx, getActivityByTxHash, txHash) + var i Activity + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AccountID, + &i.TxHash, + &i.TxType, + &i.Status, + &i.Amount, + &i.Fee, + &i.GasUsed, + &i.GasWanted, + &i.Memo, + &i.BlockHeight, + &i.Timestamp, + &i.RawLog, + &i.Error, + ) + return i, err +} + +const getCryptoListingByApiID = `-- name: GetCryptoListingByApiID :one +SELECT id, created_at, updated_at, deleted_at, api_id, name, symbol, website_slug FROM crypto_listings +WHERE api_id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetCryptoListingByApiID(ctx context.Context, apiID string) (CryptoListing, error) { + row := q.db.QueryRowContext(ctx, getCryptoListingByApiID, apiID) + var i CryptoListing + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ApiID, + &i.Name, + &i.Symbol, + &i.WebsiteSlug, + ) + return i, err +} + +const getCryptoListingByID = `-- name: GetCryptoListingByID :one +SELECT id, created_at, updated_at, deleted_at, api_id, name, symbol, website_slug FROM crypto_listings +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetCryptoListingByID(ctx context.Context, id string) (CryptoListing, error) { + row := q.db.QueryRowContext(ctx, getCryptoListingByID, id) + var i CryptoListing + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ApiID, + &i.Name, + &i.Symbol, + &i.WebsiteSlug, + ) + return i, err +} + +const getCryptoListingBySymbol = `-- name: GetCryptoListingBySymbol :one +SELECT id, created_at, updated_at, deleted_at, api_id, name, symbol, website_slug FROM crypto_listings +WHERE symbol = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetCryptoListingBySymbol(ctx context.Context, symbol string) (CryptoListing, error) { + row := q.db.QueryRowContext(ctx, getCryptoListingBySymbol, symbol) + var i CryptoListing + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ApiID, + &i.Name, + &i.Symbol, + &i.WebsiteSlug, + ) + return i, err +} + +const getCryptoListingByWebsiteSlug = `-- name: GetCryptoListingByWebsiteSlug :one +SELECT id, created_at, updated_at, deleted_at, api_id, name, symbol, website_slug FROM crypto_listings +WHERE website_slug = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetCryptoListingByWebsiteSlug(ctx context.Context, websiteSlug string) (CryptoListing, error) { + row := q.db.QueryRowContext(ctx, getCryptoListingByWebsiteSlug, websiteSlug) + var i CryptoListing + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ApiID, + &i.Name, + &i.Symbol, + &i.WebsiteSlug, + ) + return i, err +} + +const getFearGreedIndexByID = `-- name: GetFearGreedIndexByID :one +SELECT id, created_at, updated_at, deleted_at, value, value_classification, timestamp, time_until_update FROM fear_greed_index +WHERE id = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetFearGreedIndexByID(ctx context.Context, id string) (FearGreedIndex, error) { + row := q.db.QueryRowContext(ctx, getFearGreedIndexByID, id) + var i FearGreedIndex + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Value, + &i.ValueClassification, + &i.Timestamp, + &i.TimeUntilUpdate, + ) + return i, err +} + +const getGlobalMarketByID = `-- name: GetGlobalMarketByID :one +SELECT id, created_at, updated_at, deleted_at, total_market_cap_usd, total_24h_volume_usd, bitcoin_percentage_of_market_cap, active_currencies, active_assets, active_markets, last_updated FROM global_market +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetGlobalMarketByID(ctx context.Context, id string) (GlobalMarket, error) { + row := q.db.QueryRowContext(ctx, getGlobalMarketByID, id) + var i GlobalMarket + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.TotalMarketCapUsd, + &i.Total24hVolumeUsd, + &i.BitcoinPercentageOfMarketCap, + &i.ActiveCurrencies, + &i.ActiveAssets, + &i.ActiveMarkets, + &i.LastUpdated, + ) + return i, err +} + +const getHealthByEndpoint = `-- name: GetHealthByEndpoint :one +SELECT id, created_at, updated_at, deleted_at, endpoint_url, endpoint_type, chain_id, status, response_time_ms, last_checked, next_check, failure_count, success_count, response_data, error_message FROM health +WHERE endpoint_url = ? AND deleted_at IS NULL +ORDER BY last_checked DESC +LIMIT 1 +` + +func (q *Queries) GetHealthByEndpoint(ctx context.Context, endpointUrl string) (Health, error) { + row := q.db.QueryRowContext(ctx, getHealthByEndpoint, endpointUrl) + var i Health + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.EndpointUrl, + &i.EndpointType, + &i.ChainID, + &i.Status, + &i.ResponseTimeMs, + &i.LastChecked, + &i.NextCheck, + &i.FailureCount, + &i.SuccessCount, + &i.ResponseData, + &i.ErrorMessage, + ) + return i, err +} + +const getHealthByID = `-- name: GetHealthByID :one +SELECT id, created_at, updated_at, deleted_at, endpoint_url, endpoint_type, chain_id, status, response_time_ms, last_checked, next_check, failure_count, success_count, response_data, error_message FROM health +WHERE id = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetHealthByID(ctx context.Context, id string) (Health, error) { + row := q.db.QueryRowContext(ctx, getHealthByID, id) + var i Health + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.EndpointUrl, + &i.EndpointType, + &i.ChainID, + &i.Status, + &i.ResponseTimeMs, + &i.LastChecked, + &i.NextCheck, + &i.FailureCount, + &i.SuccessCount, + &i.ResponseData, + &i.ErrorMessage, + ) + return i, err +} + +const getLatestFearGreedIndex = `-- name: GetLatestFearGreedIndex :one +SELECT id, created_at, updated_at, deleted_at, value, value_classification, timestamp, time_until_update FROM fear_greed_index +WHERE deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT 1 +` + +func (q *Queries) GetLatestFearGreedIndex(ctx context.Context) (FearGreedIndex, error) { + row := q.db.QueryRowContext(ctx, getLatestFearGreedIndex) + var i FearGreedIndex + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Value, + &i.ValueClassification, + &i.Timestamp, + &i.TimeUntilUpdate, + ) + return i, err +} + +const getLatestGlobalMarket = `-- name: GetLatestGlobalMarket :one +SELECT id, created_at, updated_at, deleted_at, total_market_cap_usd, total_24h_volume_usd, bitcoin_percentage_of_market_cap, active_currencies, active_assets, active_markets, last_updated FROM global_market +WHERE deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT 1 +` + +func (q *Queries) GetLatestGlobalMarket(ctx context.Context) (GlobalMarket, error) { + row := q.db.QueryRowContext(ctx, getLatestGlobalMarket) + var i GlobalMarket + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.TotalMarketCapUsd, + &i.Total24hVolumeUsd, + &i.BitcoinPercentageOfMarketCap, + &i.ActiveCurrencies, + &i.ActiveAssets, + &i.ActiveMarkets, + &i.LastUpdated, + ) + return i, err +} + +const getServiceByAddress = `-- name: GetServiceByAddress :one +SELECT id, created_at, updated_at, deleted_at, name, description, chain_id, address, owner_address, metadata, status, block_height FROM services +WHERE address = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetServiceByAddress(ctx context.Context, address string) (Service, error) { + row := q.db.QueryRowContext(ctx, getServiceByAddress, address) + var i Service + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Description, + &i.ChainID, + &i.Address, + &i.OwnerAddress, + &i.Metadata, + &i.Status, + &i.BlockHeight, + ) + return i, err +} + +const getServiceByChainAndAddress = `-- name: GetServiceByChainAndAddress :one +SELECT id, created_at, updated_at, deleted_at, name, description, chain_id, address, owner_address, metadata, status, block_height FROM services +WHERE chain_id = ? AND address = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +type GetServiceByChainAndAddressParams struct { + ChainID string `json:"chain_id"` + Address string `json:"address"` +} + +func (q *Queries) GetServiceByChainAndAddress(ctx context.Context, arg GetServiceByChainAndAddressParams) (Service, error) { + row := q.db.QueryRowContext(ctx, getServiceByChainAndAddress, arg.ChainID, arg.Address) + var i Service + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Description, + &i.ChainID, + &i.Address, + &i.OwnerAddress, + &i.Metadata, + &i.Status, + &i.BlockHeight, + ) + return i, err +} + +const getServiceByID = `-- name: GetServiceByID :one +SELECT id, created_at, updated_at, deleted_at, name, description, chain_id, address, owner_address, metadata, status, block_height FROM services +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetServiceByID(ctx context.Context, id string) (Service, error) { + row := q.db.QueryRowContext(ctx, getServiceByID, id) + var i Service + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Description, + &i.ChainID, + &i.Address, + &i.OwnerAddress, + &i.Metadata, + &i.Status, + &i.BlockHeight, + ) + return i, err +} + +const insertActivity = `-- name: InsertActivity :one +INSERT INTO activities ( + account_id, + tx_hash, + tx_type, + status, + amount, + fee, + gas_used, + gas_wanted, + memo, + block_height, + timestamp, + raw_log, + error +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING id, created_at, updated_at, deleted_at, account_id, tx_hash, tx_type, status, amount, fee, gas_used, gas_wanted, memo, block_height, timestamp, raw_log, error +` + +type InsertActivityParams struct { + AccountID string `json:"account_id"` + TxHash sql.NullString `json:"tx_hash"` + TxType string `json:"tx_type"` + Status string `json:"status"` + Amount sql.NullString `json:"amount"` + Fee sql.NullString `json:"fee"` + GasUsed sql.NullInt64 `json:"gas_used"` + GasWanted sql.NullInt64 `json:"gas_wanted"` + Memo sql.NullString `json:"memo"` + BlockHeight sql.NullInt64 `json:"block_height"` + Timestamp time.Time `json:"timestamp"` + RawLog sql.NullString `json:"raw_log"` + Error sql.NullString `json:"error"` +} + +// ACTIVITY QUERIES +func (q *Queries) InsertActivity(ctx context.Context, arg InsertActivityParams) (Activity, error) { + row := q.db.QueryRowContext(ctx, insertActivity, + arg.AccountID, + arg.TxHash, + arg.TxType, + arg.Status, + arg.Amount, + arg.Fee, + arg.GasUsed, + arg.GasWanted, + arg.Memo, + arg.BlockHeight, + arg.Timestamp, + arg.RawLog, + arg.Error, + ) + var i Activity + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AccountID, + &i.TxHash, + &i.TxType, + &i.Status, + &i.Amount, + &i.Fee, + &i.GasUsed, + &i.GasWanted, + &i.Memo, + &i.BlockHeight, + &i.Timestamp, + &i.RawLog, + &i.Error, + ) + return i, err +} + +const insertCryptoListing = `-- name: InsertCryptoListing :one +INSERT INTO crypto_listings ( + api_id, + name, + symbol, + website_slug +) VALUES (?, ?, ?, ?) 
+RETURNING id, created_at, updated_at, deleted_at, api_id, name, symbol, website_slug +` + +type InsertCryptoListingParams struct { + ApiID string `json:"api_id"` + Name string `json:"name"` + Symbol string `json:"symbol"` + WebsiteSlug string `json:"website_slug"` +} + +// CRYPTO LISTINGS QUERIES (NEW) +func (q *Queries) InsertCryptoListing(ctx context.Context, arg InsertCryptoListingParams) (CryptoListing, error) { + row := q.db.QueryRowContext(ctx, insertCryptoListing, + arg.ApiID, + arg.Name, + arg.Symbol, + arg.WebsiteSlug, + ) + var i CryptoListing + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ApiID, + &i.Name, + &i.Symbol, + &i.WebsiteSlug, + ) + return i, err +} + +const insertFearGreedIndex = `-- name: InsertFearGreedIndex :one +INSERT INTO fear_greed_index ( + value, + value_classification, + timestamp, + time_until_update +) VALUES (?, ?, ?, ?) +RETURNING id, created_at, updated_at, deleted_at, value, value_classification, timestamp, time_until_update +` + +type InsertFearGreedIndexParams struct { + Value int64 `json:"value"` + ValueClassification string `json:"value_classification"` + Timestamp time.Time `json:"timestamp"` + TimeUntilUpdate sql.NullString `json:"time_until_update"` +} + +// FEAR AND GREED INDEX QUERIES (NEW) +func (q *Queries) InsertFearGreedIndex(ctx context.Context, arg InsertFearGreedIndexParams) (FearGreedIndex, error) { + row := q.db.QueryRowContext(ctx, insertFearGreedIndex, + arg.Value, + arg.ValueClassification, + arg.Timestamp, + arg.TimeUntilUpdate, + ) + var i FearGreedIndex + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Value, + &i.ValueClassification, + &i.Timestamp, + &i.TimeUntilUpdate, + ) + return i, err +} + +const insertGlobalMarket = `-- name: InsertGlobalMarket :one +INSERT INTO global_market ( + total_market_cap_usd, + total_24h_volume_usd, + bitcoin_percentage_of_market_cap, + active_currencies, + active_assets, + active_markets, + last_updated +) VALUES (?, ?, ?, ?, ?, ?, ?) 
+RETURNING id, created_at, updated_at, deleted_at, total_market_cap_usd, total_24h_volume_usd, bitcoin_percentage_of_market_cap, active_currencies, active_assets, active_markets, last_updated +` + +type InsertGlobalMarketParams struct { + TotalMarketCapUsd sql.NullFloat64 `json:"total_market_cap_usd"` + Total24hVolumeUsd sql.NullFloat64 `json:"total_24h_volume_usd"` + BitcoinPercentageOfMarketCap sql.NullFloat64 `json:"bitcoin_percentage_of_market_cap"` + ActiveCurrencies sql.NullInt64 `json:"active_currencies"` + ActiveAssets sql.NullInt64 `json:"active_assets"` + ActiveMarkets sql.NullInt64 `json:"active_markets"` + LastUpdated time.Time `json:"last_updated"` +} + +func (q *Queries) InsertGlobalMarket(ctx context.Context, arg InsertGlobalMarketParams) (GlobalMarket, error) { + row := q.db.QueryRowContext(ctx, insertGlobalMarket, + arg.TotalMarketCapUsd, + arg.Total24hVolumeUsd, + arg.BitcoinPercentageOfMarketCap, + arg.ActiveCurrencies, + arg.ActiveAssets, + arg.ActiveMarkets, + arg.LastUpdated, + ) + var i GlobalMarket + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.TotalMarketCapUsd, + &i.Total24hVolumeUsd, + &i.BitcoinPercentageOfMarketCap, + &i.ActiveCurrencies, + &i.ActiveAssets, + &i.ActiveMarkets, + &i.LastUpdated, + ) + return i, err +} + +const insertHealth = `-- name: InsertHealth :one +INSERT INTO health ( + endpoint_url, + endpoint_type, + chain_id, + status, + response_time_ms, + last_checked, + next_check, + failure_count, + success_count, + response_data, + error_message +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING id, created_at, updated_at, deleted_at, endpoint_url, endpoint_type, chain_id, status, response_time_ms, last_checked, next_check, failure_count, success_count, response_data, error_message +` + +type InsertHealthParams struct { + EndpointUrl string `json:"endpoint_url"` + EndpointType string `json:"endpoint_type"` + ChainID sql.NullString `json:"chain_id"` + Status string `json:"status"` + ResponseTimeMs sql.NullInt64 `json:"response_time_ms"` + LastChecked time.Time `json:"last_checked"` + NextCheck sql.NullTime `json:"next_check"` + FailureCount int64 `json:"failure_count"` + SuccessCount int64 `json:"success_count"` + ResponseData sql.NullString `json:"response_data"` + ErrorMessage sql.NullString `json:"error_message"` +} + +// HEALTH QUERIES +func (q *Queries) InsertHealth(ctx context.Context, arg InsertHealthParams) (Health, error) { + row := q.db.QueryRowContext(ctx, insertHealth, + arg.EndpointUrl, + arg.EndpointType, + arg.ChainID, + arg.Status, + arg.ResponseTimeMs, + arg.LastChecked, + arg.NextCheck, + arg.FailureCount, + arg.SuccessCount, + arg.ResponseData, + arg.ErrorMessage, + ) + var i Health + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.EndpointUrl, + &i.EndpointType, + &i.ChainID, + &i.Status, + &i.ResponseTimeMs, + &i.LastChecked, + &i.NextCheck, + &i.FailureCount, + &i.SuccessCount, + &i.ResponseData, + &i.ErrorMessage, + ) + return i, err +} + +const insertService = `-- name: InsertService :one +INSERT INTO services ( + name, + description, + chain_id, + address, + owner_address, + metadata, + status, + block_height +) VALUES (?, ?, ?, ?, ?, ?, ?, ?) 
+RETURNING id, created_at, updated_at, deleted_at, name, description, chain_id, address, owner_address, metadata, status, block_height +` + +type InsertServiceParams struct { + Name string `json:"name"` + Description sql.NullString `json:"description"` + ChainID string `json:"chain_id"` + Address string `json:"address"` + OwnerAddress string `json:"owner_address"` + Metadata sql.NullString `json:"metadata"` + Status string `json:"status"` + BlockHeight int64 `json:"block_height"` +} + +func (q *Queries) InsertService(ctx context.Context, arg InsertServiceParams) (Service, error) { + row := q.db.QueryRowContext(ctx, insertService, + arg.Name, + arg.Description, + arg.ChainID, + arg.Address, + arg.OwnerAddress, + arg.Metadata, + arg.Status, + arg.BlockHeight, + ) + var i Service + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Description, + &i.ChainID, + &i.Address, + &i.OwnerAddress, + &i.Metadata, + &i.Status, + &i.BlockHeight, + ) + return i, err +} + +const listActivitiesByAccount = `-- name: ListActivitiesByAccount :many +SELECT id, created_at, updated_at, deleted_at, account_id, tx_hash, tx_type, status, amount, fee, gas_used, gas_wanted, memo, block_height, timestamp, raw_log, error FROM activities +WHERE account_id = ? AND deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? OFFSET ? +` + +type ListActivitiesByAccountParams struct { + AccountID string `json:"account_id"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListActivitiesByAccount(ctx context.Context, arg ListActivitiesByAccountParams) ([]Activity, error) { + rows, err := q.db.QueryContext(ctx, listActivitiesByAccount, arg.AccountID, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Activity + for rows.Next() { + var i Activity + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AccountID, + &i.TxHash, + &i.TxType, + &i.Status, + &i.Amount, + &i.Fee, + &i.GasUsed, + &i.GasWanted, + &i.Memo, + &i.BlockHeight, + &i.Timestamp, + &i.RawLog, + &i.Error, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listActivitiesByStatus = `-- name: ListActivitiesByStatus :many +SELECT id, created_at, updated_at, deleted_at, account_id, tx_hash, tx_type, status, amount, fee, gas_used, gas_wanted, memo, block_height, timestamp, raw_log, error FROM activities +WHERE status = ? AND deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? OFFSET ? 
+` + +type ListActivitiesByStatusParams struct { + Status string `json:"status"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListActivitiesByStatus(ctx context.Context, arg ListActivitiesByStatusParams) ([]Activity, error) { + rows, err := q.db.QueryContext(ctx, listActivitiesByStatus, arg.Status, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Activity + for rows.Next() { + var i Activity + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AccountID, + &i.TxHash, + &i.TxType, + &i.Status, + &i.Amount, + &i.Fee, + &i.GasUsed, + &i.GasWanted, + &i.Memo, + &i.BlockHeight, + &i.Timestamp, + &i.RawLog, + &i.Error, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listActivitiesByType = `-- name: ListActivitiesByType :many +SELECT id, created_at, updated_at, deleted_at, account_id, tx_hash, tx_type, status, amount, fee, gas_used, gas_wanted, memo, block_height, timestamp, raw_log, error FROM activities +WHERE tx_type = ? AND deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? OFFSET ? +` + +type ListActivitiesByTypeParams struct { + TxType string `json:"tx_type"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListActivitiesByType(ctx context.Context, arg ListActivitiesByTypeParams) ([]Activity, error) { + rows, err := q.db.QueryContext(ctx, listActivitiesByType, arg.TxType, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Activity + for rows.Next() { + var i Activity + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AccountID, + &i.TxHash, + &i.TxType, + &i.Status, + &i.Amount, + &i.Fee, + &i.GasUsed, + &i.GasWanted, + &i.Memo, + &i.BlockHeight, + &i.Timestamp, + &i.RawLog, + &i.Error, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listCryptoListings = `-- name: ListCryptoListings :many +SELECT id, created_at, updated_at, deleted_at, api_id, name, symbol, website_slug FROM crypto_listings +WHERE deleted_at IS NULL +ORDER BY name ASC +LIMIT ? OFFSET ? +` + +type ListCryptoListingsParams struct { + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListCryptoListings(ctx context.Context, arg ListCryptoListingsParams) ([]CryptoListing, error) { + rows, err := q.db.QueryContext(ctx, listCryptoListings, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []CryptoListing + for rows.Next() { + var i CryptoListing + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ApiID, + &i.Name, + &i.Symbol, + &i.WebsiteSlug, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listFearGreedIndexHistory = `-- name: ListFearGreedIndexHistory :many +SELECT id, created_at, updated_at, deleted_at, value, value_classification, timestamp, time_until_update FROM fear_greed_index +WHERE deleted_at IS NULL +ORDER BY timestamp DESC +LIMIT ? OFFSET ? 
+` + +type ListFearGreedIndexHistoryParams struct { + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListFearGreedIndexHistory(ctx context.Context, arg ListFearGreedIndexHistoryParams) ([]FearGreedIndex, error) { + rows, err := q.db.QueryContext(ctx, listFearGreedIndexHistory, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []FearGreedIndex + for rows.Next() { + var i FearGreedIndex + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Value, + &i.ValueClassification, + &i.Timestamp, + &i.TimeUntilUpdate, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listGlobalMarketHistory = `-- name: ListGlobalMarketHistory :many +SELECT id, created_at, updated_at, deleted_at, total_market_cap_usd, total_24h_volume_usd, bitcoin_percentage_of_market_cap, active_currencies, active_assets, active_markets, last_updated FROM global_market +WHERE deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT ? OFFSET ? +` + +type ListGlobalMarketHistoryParams struct { + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListGlobalMarketHistory(ctx context.Context, arg ListGlobalMarketHistoryParams) ([]GlobalMarket, error) { + rows, err := q.db.QueryContext(ctx, listGlobalMarketHistory, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GlobalMarket + for rows.Next() { + var i GlobalMarket + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.TotalMarketCapUsd, + &i.Total24hVolumeUsd, + &i.BitcoinPercentageOfMarketCap, + &i.ActiveCurrencies, + &i.ActiveAssets, + &i.ActiveMarkets, + &i.LastUpdated, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listHealthByChain = `-- name: ListHealthByChain :many +SELECT id, created_at, updated_at, deleted_at, endpoint_url, endpoint_type, chain_id, status, response_time_ms, last_checked, next_check, failure_count, success_count, response_data, error_message FROM health +WHERE chain_id = ? AND deleted_at IS NULL +ORDER BY last_checked DESC +LIMIT ? OFFSET ? 
+` + +type ListHealthByChainParams struct { + ChainID sql.NullString `json:"chain_id"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListHealthByChain(ctx context.Context, arg ListHealthByChainParams) ([]Health, error) { + rows, err := q.db.QueryContext(ctx, listHealthByChain, arg.ChainID, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Health + for rows.Next() { + var i Health + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.EndpointUrl, + &i.EndpointType, + &i.ChainID, + &i.Status, + &i.ResponseTimeMs, + &i.LastChecked, + &i.NextCheck, + &i.FailureCount, + &i.SuccessCount, + &i.ResponseData, + &i.ErrorMessage, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listHealthByStatus = `-- name: ListHealthByStatus :many +SELECT id, created_at, updated_at, deleted_at, endpoint_url, endpoint_type, chain_id, status, response_time_ms, last_checked, next_check, failure_count, success_count, response_data, error_message FROM health +WHERE status = ? AND deleted_at IS NULL +ORDER BY last_checked DESC +LIMIT ? OFFSET ? +` + +type ListHealthByStatusParams struct { + Status string `json:"status"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListHealthByStatus(ctx context.Context, arg ListHealthByStatusParams) ([]Health, error) { + rows, err := q.db.QueryContext(ctx, listHealthByStatus, arg.Status, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Health + for rows.Next() { + var i Health + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.EndpointUrl, + &i.EndpointType, + &i.ChainID, + &i.Status, + &i.ResponseTimeMs, + &i.LastChecked, + &i.NextCheck, + &i.FailureCount, + &i.SuccessCount, + &i.ResponseData, + &i.ErrorMessage, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listHealthChecksNeedingUpdate = `-- name: ListHealthChecksNeedingUpdate :many +SELECT id, created_at, updated_at, deleted_at, endpoint_url, endpoint_type, chain_id, status, response_time_ms, last_checked, next_check, failure_count, success_count, response_data, error_message FROM health +WHERE next_check <= CURRENT_TIMESTAMP AND deleted_at IS NULL +ORDER BY next_check ASC +LIMIT ? 
+` + +func (q *Queries) ListHealthChecksNeedingUpdate(ctx context.Context, limit int64) ([]Health, error) { + rows, err := q.db.QueryContext(ctx, listHealthChecksNeedingUpdate, limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Health + for rows.Next() { + var i Health + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.EndpointUrl, + &i.EndpointType, + &i.ChainID, + &i.Status, + &i.ResponseTimeMs, + &i.LastChecked, + &i.NextCheck, + &i.FailureCount, + &i.SuccessCount, + &i.ResponseData, + &i.ErrorMessage, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listServicesByChain = `-- name: ListServicesByChain :many +SELECT id, created_at, updated_at, deleted_at, name, description, chain_id, address, owner_address, metadata, status, block_height FROM services +WHERE chain_id = ? AND deleted_at IS NULL +ORDER BY name ASC +LIMIT ? OFFSET ? +` + +type ListServicesByChainParams struct { + ChainID string `json:"chain_id"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListServicesByChain(ctx context.Context, arg ListServicesByChainParams) ([]Service, error) { + rows, err := q.db.QueryContext(ctx, listServicesByChain, arg.ChainID, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Service + for rows.Next() { + var i Service + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Description, + &i.ChainID, + &i.Address, + &i.OwnerAddress, + &i.Metadata, + &i.Status, + &i.BlockHeight, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listServicesByOwner = `-- name: ListServicesByOwner :many +SELECT id, created_at, updated_at, deleted_at, name, description, chain_id, address, owner_address, metadata, status, block_height FROM services +WHERE owner_address = ? AND deleted_at IS NULL +ORDER BY created_at DESC +LIMIT ? OFFSET ? +` + +type ListServicesByOwnerParams struct { + OwnerAddress string `json:"owner_address"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListServicesByOwner(ctx context.Context, arg ListServicesByOwnerParams) ([]Service, error) { + rows, err := q.db.QueryContext(ctx, listServicesByOwner, arg.OwnerAddress, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Service + for rows.Next() { + var i Service + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Description, + &i.ChainID, + &i.Address, + &i.OwnerAddress, + &i.Metadata, + &i.Status, + &i.BlockHeight, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const softDeleteActivity = `-- name: SoftDeleteActivity :exec +UPDATE activities +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? 
+` + +func (q *Queries) SoftDeleteActivity(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteActivity, id) + return err +} + +const softDeleteCryptoListing = `-- name: SoftDeleteCryptoListing :exec +UPDATE crypto_listings +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteCryptoListing(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteCryptoListing, id) + return err +} + +const softDeleteFearGreedIndex = `-- name: SoftDeleteFearGreedIndex :exec +UPDATE fear_greed_index +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteFearGreedIndex(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteFearGreedIndex, id) + return err +} + +const softDeleteGlobalMarket = `-- name: SoftDeleteGlobalMarket :exec +UPDATE global_market +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteGlobalMarket(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteGlobalMarket, id) + return err +} + +const softDeleteHealth = `-- name: SoftDeleteHealth :exec +UPDATE health +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteHealth(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteHealth, id) + return err +} + +const softDeleteService = `-- name: SoftDeleteService :exec +UPDATE services +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteService(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteService, id) + return err +} + +const updateActivityStatus = `-- name: UpdateActivityStatus :one +UPDATE activities +SET + status = ?, + tx_hash = ?, + block_height = ?, + gas_used = ?, + raw_log = ?, + error = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, account_id, tx_hash, tx_type, status, amount, fee, gas_used, gas_wanted, memo, block_height, timestamp, raw_log, error +` + +type UpdateActivityStatusParams struct { + Status string `json:"status"` + TxHash sql.NullString `json:"tx_hash"` + BlockHeight sql.NullInt64 `json:"block_height"` + GasUsed sql.NullInt64 `json:"gas_used"` + RawLog sql.NullString `json:"raw_log"` + Error sql.NullString `json:"error"` + ID string `json:"id"` +} + +func (q *Queries) UpdateActivityStatus(ctx context.Context, arg UpdateActivityStatusParams) (Activity, error) { + row := q.db.QueryRowContext(ctx, updateActivityStatus, + arg.Status, + arg.TxHash, + arg.BlockHeight, + arg.GasUsed, + arg.RawLog, + arg.Error, + arg.ID, + ) + var i Activity + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AccountID, + &i.TxHash, + &i.TxType, + &i.Status, + &i.Amount, + &i.Fee, + &i.GasUsed, + &i.GasWanted, + &i.Memo, + &i.BlockHeight, + &i.Timestamp, + &i.RawLog, + &i.Error, + ) + return i, err +} + +const updateCryptoListing = `-- name: UpdateCryptoListing :one +UPDATE crypto_listings +SET + name = ?, + symbol = ?, + website_slug = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, api_id, name, symbol, website_slug +` + +type UpdateCryptoListingParams struct { + Name string `json:"name"` + Symbol string `json:"symbol"` + WebsiteSlug string `json:"website_slug"` + ID string `json:"id"` +} + +func (q *Queries) UpdateCryptoListing(ctx context.Context, arg UpdateCryptoListingParams) (CryptoListing, error) { + row := q.db.QueryRowContext(ctx, updateCryptoListing, + arg.Name, + arg.Symbol, + arg.WebsiteSlug, + arg.ID, + ) + var i CryptoListing + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ApiID, + &i.Name, + &i.Symbol, + &i.WebsiteSlug, + ) + return i, err +} + +const updateFearGreedIndex = `-- name: UpdateFearGreedIndex :one +UPDATE fear_greed_index +SET + value = ?, + value_classification = ?, + timestamp = ?, + time_until_update = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, value, value_classification, timestamp, time_until_update +` + +type UpdateFearGreedIndexParams struct { + Value int64 `json:"value"` + ValueClassification string `json:"value_classification"` + Timestamp time.Time `json:"timestamp"` + TimeUntilUpdate sql.NullString `json:"time_until_update"` + ID string `json:"id"` +} + +func (q *Queries) UpdateFearGreedIndex(ctx context.Context, arg UpdateFearGreedIndexParams) (FearGreedIndex, error) { + row := q.db.QueryRowContext(ctx, updateFearGreedIndex, + arg.Value, + arg.ValueClassification, + arg.Timestamp, + arg.TimeUntilUpdate, + arg.ID, + ) + var i FearGreedIndex + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Value, + &i.ValueClassification, + &i.Timestamp, + &i.TimeUntilUpdate, + ) + return i, err +} + +const updateGlobalMarket = `-- name: UpdateGlobalMarket :one +UPDATE global_market +SET + total_market_cap_usd = ?, + total_24h_volume_usd = ?, + bitcoin_percentage_of_market_cap = ?, + active_currencies = ?, + active_assets = ?, + active_markets = ?, + last_updated = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, total_market_cap_usd, total_24h_volume_usd, bitcoin_percentage_of_market_cap, active_currencies, active_assets, active_markets, last_updated +` + +type UpdateGlobalMarketParams struct { + TotalMarketCapUsd sql.NullFloat64 `json:"total_market_cap_usd"` + Total24hVolumeUsd sql.NullFloat64 `json:"total_24h_volume_usd"` + BitcoinPercentageOfMarketCap sql.NullFloat64 `json:"bitcoin_percentage_of_market_cap"` + ActiveCurrencies sql.NullInt64 `json:"active_currencies"` + ActiveAssets sql.NullInt64 `json:"active_assets"` + ActiveMarkets sql.NullInt64 `json:"active_markets"` + LastUpdated time.Time `json:"last_updated"` + ID string `json:"id"` +} + +func (q *Queries) UpdateGlobalMarket(ctx context.Context, arg UpdateGlobalMarketParams) (GlobalMarket, error) { + row := q.db.QueryRowContext(ctx, updateGlobalMarket, + arg.TotalMarketCapUsd, + arg.Total24hVolumeUsd, + arg.BitcoinPercentageOfMarketCap, + arg.ActiveCurrencies, + arg.ActiveAssets, + arg.ActiveMarkets, + arg.LastUpdated, + arg.ID, + ) + var i GlobalMarket + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.TotalMarketCapUsd, + &i.Total24hVolumeUsd, + &i.BitcoinPercentageOfMarketCap, + &i.ActiveCurrencies, + &i.ActiveAssets, + &i.ActiveMarkets, + &i.LastUpdated, + ) + return i, err +} + +const updateHealthCheck = `-- name: UpdateHealthCheck :one +UPDATE health +SET + status = ?, + response_time_ms = ?, + last_checked = CURRENT_TIMESTAMP, + next_check = ?, + failure_count = CASE WHEN status = 'failed' THEN failure_count + 1 ELSE failure_count END, + success_count = CASE WHEN status = 'success' THEN success_count + 1 ELSE success_count END, + response_data = ?, + error_message = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, endpoint_url, endpoint_type, chain_id, status, response_time_ms, last_checked, next_check, failure_count, success_count, response_data, error_message +` + +type UpdateHealthCheckParams struct { + Status string `json:"status"` + ResponseTimeMs sql.NullInt64 `json:"response_time_ms"` + NextCheck sql.NullTime `json:"next_check"` + ResponseData sql.NullString `json:"response_data"` + ErrorMessage sql.NullString `json:"error_message"` + ID string `json:"id"` +} + +func (q *Queries) UpdateHealthCheck(ctx context.Context, arg UpdateHealthCheckParams) (Health, error) { + row := q.db.QueryRowContext(ctx, updateHealthCheck, + arg.Status, + arg.ResponseTimeMs, + arg.NextCheck, + arg.ResponseData, + arg.ErrorMessage, + arg.ID, + ) + var i Health + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.EndpointUrl, + &i.EndpointType, + &i.ChainID, + &i.Status, + &i.ResponseTimeMs, + &i.LastChecked, + &i.NextCheck, + &i.FailureCount, + &i.SuccessCount, + &i.ResponseData, + &i.ErrorMessage, + ) + return i, err +} + +const updateService = `-- name: UpdateService :one +UPDATE services +SET + name = ?, + description = ?, + owner_address = ?, + metadata = ?, + status = ?, + block_height = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, name, description, chain_id, address, owner_address, metadata, status, block_height +` + +type UpdateServiceParams struct { + Name string `json:"name"` + Description sql.NullString `json:"description"` + OwnerAddress string `json:"owner_address"` + Metadata sql.NullString `json:"metadata"` + Status string `json:"status"` + BlockHeight int64 `json:"block_height"` + ID string `json:"id"` +} + +func (q *Queries) UpdateService(ctx context.Context, arg UpdateServiceParams) (Service, error) { + row := q.db.QueryRowContext(ctx, updateService, + arg.Name, + arg.Description, + arg.OwnerAddress, + arg.Metadata, + arg.Status, + arg.BlockHeight, + arg.ID, + ) + var i Service + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Description, + &i.ChainID, + &i.Address, + &i.OwnerAddress, + &i.Metadata, + &i.Status, + &i.BlockHeight, + ) + return i, err +} diff --git a/internal/db/activity/schema.sql b/internal/db/activity/schema.sql new file mode 100644 index 0000000..1c64724 --- /dev/null +++ b/internal/db/activity/schema.sql @@ -0,0 +1,136 @@ + +-- Service for Service Records sourced on chain +CREATE TABLE services ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + name TEXT NOT NULL, + description TEXT, + chain_id TEXT NOT NULL, + address TEXT NOT NULL, + owner_address TEXT NOT NULL, + metadata TEXT CHECK(json_valid(metadata)), + status TEXT NOT NULL, + block_height INTEGER NOT NULL, + FOREIGN KEY (chain_id) REFERENCES assets(chain_id), + UNIQUE(chain_id, address) +); + +-- Activity table for basic transaction broadcast activity +CREATE TABLE activities ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + account_id TEXT NOT NULL, + tx_hash TEXT, + tx_type TEXT NOT NULL, + status TEXT NOT NULL, + amount TEXT, + fee TEXT, + gas_used INTEGER, + gas_wanted INTEGER, + memo TEXT, + block_height INTEGER, + timestamp TIMESTAMP NOT NULL, + raw_log TEXT, + error TEXT, + FOREIGN KEY (account_id) REFERENCES accounts(id) +); + +-- Health table for scheduled checks for API endpoints +CREATE TABLE health ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + endpoint_url TEXT NOT NULL, + endpoint_type TEXT NOT NULL, + chain_id TEXT, + status TEXT NOT NULL, + response_time_ms INTEGER, + last_checked TIMESTAMP NOT NULL, + next_check TIMESTAMP, + failure_count INTEGER NOT NULL DEFAULT 0, + success_count INTEGER NOT NULL DEFAULT 0, + response_data TEXT, + error_message TEXT, + FOREIGN KEY (chain_id) REFERENCES assets(chain_id) +); + +-- Global market data from Alternative.me API +CREATE TABLE global_market ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + total_market_cap_usd REAL, + total_24h_volume_usd REAL, + bitcoin_percentage_of_market_cap REAL, + active_currencies INTEGER, + active_assets INTEGER, + active_markets INTEGER, + last_updated TIMESTAMP NOT NULL +); + +-- Fear and Greed Index data from Alternative.me +CREATE TABLE fear_greed_index ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at 
TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + value INTEGER NOT NULL, + value_classification TEXT NOT NULL, + timestamp TIMESTAMP NOT NULL, + time_until_update TEXT +); + +-- Listings data from Alternative.me API +CREATE TABLE crypto_listings ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + api_id TEXT NOT NULL, + name TEXT NOT NULL, + symbol TEXT NOT NULL, + website_slug TEXT NOT NULL, + UNIQUE(api_id) +); + + +CREATE INDEX idx_services_name ON services(name); +CREATE INDEX idx_services_chain_id ON services(chain_id); +CREATE INDEX idx_services_address ON services(address); +CREATE INDEX idx_services_owner_address ON services(owner_address); +CREATE INDEX idx_services_status ON services(status); +CREATE INDEX idx_services_deleted_at ON services(deleted_at); + +CREATE INDEX idx_activities_account_id ON activities(account_id); +CREATE INDEX idx_activities_tx_hash ON activities(tx_hash); +CREATE INDEX idx_activities_tx_type ON activities(tx_type); +CREATE INDEX idx_activities_status ON activities(status); +CREATE INDEX idx_activities_timestamp ON activities(timestamp); +CREATE INDEX idx_activities_block_height ON activities(block_height); +CREATE INDEX idx_activities_deleted_at ON activities(deleted_at); + +CREATE INDEX idx_health_endpoint_url ON health(endpoint_url); +CREATE INDEX idx_health_endpoint_type ON health(endpoint_type); +CREATE INDEX idx_health_chain_id ON health(chain_id); +CREATE INDEX idx_health_status ON health(status); +CREATE INDEX idx_health_last_checked ON health(last_checked); +CREATE INDEX idx_health_next_check ON health(next_check); +CREATE INDEX idx_health_deleted_at ON health(deleted_at); + +CREATE INDEX idx_global_market_last_updated ON global_market(last_updated); +CREATE INDEX idx_global_market_deleted_at ON global_market(deleted_at); + +CREATE INDEX idx_fear_greed_index_timestamp ON fear_greed_index(timestamp); +CREATE INDEX idx_fear_greed_index_value ON fear_greed_index(value); +CREATE INDEX idx_fear_greed_index_deleted_at ON fear_greed_index(deleted_at); + +CREATE INDEX idx_crypto_listings_api_id ON crypto_listings(api_id); +CREATE INDEX idx_crypto_listings_symbol ON crypto_listings(symbol); +CREATE INDEX idx_crypto_listings_website_slug ON crypto_listings(website_slug); +CREATE INDEX idx_crypto_listings_deleted_at ON crypto_listings(deleted_at); diff --git a/internal/db/network/db.go b/internal/db/network/db.go new file mode 100644 index 0000000..d69bdcb --- /dev/null +++ b/internal/db/network/db.go @@ -0,0 +1,31 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 + +package network + +import ( + "context" + "database/sql" +) + +type DBTX interface { + ExecContext(context.Context, string, ...interface{}) (sql.Result, error) + PrepareContext(context.Context, string) (*sql.Stmt, error) + QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) + QueryRowContext(context.Context, string, ...interface{}) *sql.Row +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx *sql.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/internal/db/network/models.go b/internal/db/network/models.go new file mode 100644 index 0000000..b61d9fc --- /dev/null +++ b/internal/db/network/models.go @@ -0,0 +1,96 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.28.0 + +package network + +import ( + "database/sql" + "time" +) + +type Asset struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Name string `json:"name"` + Symbol string `json:"symbol"` + Decimals int64 `json:"decimals"` + ChainID string `json:"chain_id"` + Channel string `json:"channel"` + AssetType string `json:"asset_type"` + CoingeckoID sql.NullString `json:"coingecko_id"` +} + +type Blockchain struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + ChainName string `json:"chain_name"` + ChainIDCosmos sql.NullString `json:"chain_id_cosmos"` + ChainIDEvm sql.NullString `json:"chain_id_evm"` + ApiName sql.NullString `json:"api_name"` + BechAccountPrefix sql.NullString `json:"bech_account_prefix"` + BechValidatorPrefix sql.NullString `json:"bech_validator_prefix"` + MainAssetSymbol sql.NullString `json:"main_asset_symbol"` + MainAssetDenom sql.NullString `json:"main_asset_denom"` + StakingAssetSymbol sql.NullString `json:"staking_asset_symbol"` + StakingAssetDenom sql.NullString `json:"staking_asset_denom"` + IsStakeEnabled bool `json:"is_stake_enabled"` + ChainImage sql.NullString `json:"chain_image"` + MainAssetImage sql.NullString `json:"main_asset_image"` + StakingAssetImage sql.NullString `json:"staking_asset_image"` + ChainType string `json:"chain_type"` + IsSupportMobileWallet bool `json:"is_support_mobile_wallet"` + IsSupportExtensionWallet bool `json:"is_support_extension_wallet"` + IsSupportErc20 bool `json:"is_support_erc20"` + DescriptionEn sql.NullString `json:"description_en"` + DescriptionKo sql.NullString `json:"description_ko"` + DescriptionJa sql.NullString `json:"description_ja"` + OriginGenesisTime sql.NullTime `json:"origin_genesis_time"` + AccountType string `json:"account_type"` + BtcStaking sql.NullString `json:"btc_staking"` + CosmosFeeInfo sql.NullString `json:"cosmos_fee_info"` + EvmFeeInfo sql.NullString `json:"evm_fee_info"` + LcdEndpoint sql.NullString `json:"lcd_endpoint"` + GrpcEndpoint sql.NullString `json:"grpc_endpoint"` + EvmRpcEndpoint sql.NullString `json:"evm_rpc_endpoint"` + Explorer sql.NullString `json:"explorer"` + About sql.NullString `json:"about"` + Forum sql.NullString `json:"forum"` +} + +type Price struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + AssetID string `json:"asset_id"` + PriceUsd sql.NullFloat64 `json:"price_usd"` + PriceBtc sql.NullFloat64 `json:"price_btc"` + Volume24hUsd sql.NullFloat64 `json:"volume_24h_usd"` + MarketCapUsd sql.NullFloat64 `json:"market_cap_usd"` + AvailableSupply sql.NullFloat64 `json:"available_supply"` + TotalSupply sql.NullFloat64 `json:"total_supply"` + MaxSupply sql.NullFloat64 `json:"max_supply"` + PercentChange1h sql.NullFloat64 `json:"percent_change_1h"` + PercentChange24h sql.NullFloat64 `json:"percent_change_24h"` + PercentChange7d sql.NullFloat64 `json:"percent_change_7d"` + Rank sql.NullInt64 `json:"rank"` + LastUpdated time.Time `json:"last_updated"` +} + +type PriceConversion struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + PriceID string `json:"price_id"` + CurrencyCode string `json:"currency_code"` + Price sql.NullFloat64 
`json:"price"` + Volume24h sql.NullFloat64 `json:"volume_24h"` + MarketCap sql.NullFloat64 `json:"market_cap"` + LastUpdated time.Time `json:"last_updated"` +} diff --git a/internal/db/network/querier.go b/internal/db/network/querier.go new file mode 100644 index 0000000..61a3a06 --- /dev/null +++ b/internal/db/network/querier.go @@ -0,0 +1,64 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 + +package network + +import ( + "context" + "database/sql" +) + +type Querier interface { + CountBlockchainsByChainType(ctx context.Context, dollar_1 sql.NullString) (int64, error) + GetAssetByChainAndSymbol(ctx context.Context, arg GetAssetByChainAndSymbolParams) (Asset, error) + GetAssetByID(ctx context.Context, id string) (Asset, error) + GetAssetBySymbol(ctx context.Context, symbol string) (Asset, error) + GetAssetWithLatestPrice(ctx context.Context, id string) (GetAssetWithLatestPriceRow, error) + GetBlockchainByChainName(ctx context.Context, chainName string) (Blockchain, error) + GetBlockchainByCosmosChainID(ctx context.Context, chainIDCosmos sql.NullString) (Blockchain, error) + GetBlockchainByEvmChainID(ctx context.Context, chainIDEvm sql.NullString) (Blockchain, error) + GetBlockchainByID(ctx context.Context, id string) (Blockchain, error) + GetBlockchainEndpoints(ctx context.Context, id string) (GetBlockchainEndpointsRow, error) + GetBlockchainExplorer(ctx context.Context, id string) (GetBlockchainExplorerRow, error) + GetBlockchainWithAssetInfo(ctx context.Context, id string) (GetBlockchainWithAssetInfoRow, error) + GetPriceByAssetID(ctx context.Context, assetID string) (Price, error) + GetPriceByID(ctx context.Context, id string) (Price, error) + GetPriceConversionByCurrency(ctx context.Context, arg GetPriceConversionByCurrencyParams) (PriceConversion, error) + GetPriceConversionByID(ctx context.Context, id string) (PriceConversion, error) + GetPriceConversionsByPriceID(ctx context.Context, priceID string) ([]PriceConversion, error) + // ASSET QUERIES + InsertAsset(ctx context.Context, arg InsertAssetParams) (Asset, error) + // BLOCKCHAIN QUERIES + InsertBlockchain(ctx context.Context, arg InsertBlockchainParams) (Blockchain, error) + // PRICE QUERIES (UPDATED) + InsertPrice(ctx context.Context, arg InsertPriceParams) (Price, error) + // PRICE CONVERSION QUERIES (NEW) + InsertPriceConversion(ctx context.Context, arg InsertPriceConversionParams) (PriceConversion, error) + ListAllBlockchains(ctx context.Context) ([]Blockchain, error) + ListAssetsByChain(ctx context.Context, chainID string) ([]Asset, error) + ListAssetsWithLatestPrices(ctx context.Context, arg ListAssetsWithLatestPricesParams) ([]ListAssetsWithLatestPricesRow, error) + ListBlockchainsByChainType(ctx context.Context, dollar_1 sql.NullString) ([]Blockchain, error) + ListBlockchainsWithAssetInfo(ctx context.Context, arg ListBlockchainsWithAssetInfoParams) ([]ListBlockchainsWithAssetInfoRow, error) + ListBlockchainsWithERC20Support(ctx context.Context) ([]Blockchain, error) + ListBlockchainsWithExtensionSupport(ctx context.Context) ([]Blockchain, error) + ListBlockchainsWithMobileSupport(ctx context.Context) ([]Blockchain, error) + ListBlockchainsWithStaking(ctx context.Context) ([]Blockchain, error) + ListPriceHistoryByAssetID(ctx context.Context, arg ListPriceHistoryByAssetIDParams) ([]Price, error) + SearchBlockchains(ctx context.Context, arg SearchBlockchainsParams) ([]Blockchain, error) + SoftDeleteAsset(ctx context.Context, id string) error + SoftDeleteBlockchain(ctx context.Context, id string) 
error + SoftDeletePriceConversion(ctx context.Context, id string) error + UpdateAsset(ctx context.Context, arg UpdateAssetParams) (Asset, error) + UpdateBlockchain(ctx context.Context, arg UpdateBlockchainParams) (Blockchain, error) + UpdateBlockchainDescriptions(ctx context.Context, arg UpdateBlockchainDescriptionsParams) (Blockchain, error) + UpdateBlockchainEndpoints(ctx context.Context, arg UpdateBlockchainEndpointsParams) (Blockchain, error) + UpdateBlockchainExplorer(ctx context.Context, arg UpdateBlockchainExplorerParams) (Blockchain, error) + UpdateBlockchainFeeInfo(ctx context.Context, arg UpdateBlockchainFeeInfoParams) (Blockchain, error) + UpdateBlockchainImages(ctx context.Context, arg UpdateBlockchainImagesParams) (Blockchain, error) + UpdateBlockchainSocialLinks(ctx context.Context, arg UpdateBlockchainSocialLinksParams) (Blockchain, error) + UpdatePrice(ctx context.Context, arg UpdatePriceParams) (Price, error) + UpdatePriceConversion(ctx context.Context, arg UpdatePriceConversionParams) (PriceConversion, error) +} + +var _ Querier = (*Queries)(nil) diff --git a/internal/db/network/query.sql b/internal/db/network/query.sql new file mode 100644 index 0000000..b228615 --- /dev/null +++ b/internal/db/network/query.sql @@ -0,0 +1,453 @@ +-- ASSET QUERIES +-- name: InsertAsset :one +INSERT INTO assets ( + name, + symbol, + decimals, + chain_id, + channel, + asset_type, + coingecko_id +) VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetAssetByID :one +SELECT * FROM assets +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAssetBySymbol :one +SELECT * FROM assets +WHERE symbol = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAssetByChainAndSymbol :one +SELECT * FROM assets +WHERE chain_id = ? AND symbol = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: ListAssetsByChain :many +SELECT * FROM assets +WHERE chain_id = ? AND deleted_at IS NULL +ORDER BY symbol ASC; + +-- name: UpdateAsset :one +UPDATE assets +SET + name = ?, + decimals = ?, + channel = ?, + asset_type = ?, + coingecko_id = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteAsset :exec +UPDATE assets +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- PRICE QUERIES (UPDATED) +-- name: InsertPrice :one +INSERT INTO prices ( + asset_id, + price_usd, + price_btc, + volume_24h_usd, + market_cap_usd, + available_supply, + total_supply, + max_supply, + percent_change_1h, + percent_change_24h, + percent_change_7d, + rank, + last_updated +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetPriceByAssetID :one +SELECT * FROM prices +WHERE asset_id = ? AND deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT 1; + +-- name: GetPriceByID :one +SELECT * FROM prices +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: ListPriceHistoryByAssetID :many +SELECT * FROM prices +WHERE asset_id = ? AND deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT ? 
OFFSET ?; + +-- name: GetAssetWithLatestPrice :one +SELECT a.*, p.price_usd, p.price_btc, p.volume_24h_usd, p.market_cap_usd, + p.available_supply, p.total_supply, p.max_supply, + p.percent_change_1h, p.percent_change_24h, p.percent_change_7d, + p.rank, p.last_updated +FROM assets a +LEFT JOIN ( + SELECT p1.* + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE a.id = ? AND a.deleted_at IS NULL +LIMIT 1; + +-- name: ListAssetsWithLatestPrices :many +SELECT a.*, p.price_usd, p.price_btc, p.volume_24h_usd, p.market_cap_usd, + p.available_supply, p.total_supply, p.max_supply, + p.percent_change_1h, p.percent_change_24h, p.percent_change_7d, + p.rank, p.last_updated +FROM assets a +LEFT JOIN ( + SELECT p1.* + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE a.deleted_at IS NULL +ORDER BY p.rank ASC, a.symbol ASC +LIMIT ? OFFSET ?; + +-- name: UpdatePrice :one +UPDATE prices +SET + price_usd = ?, + price_btc = ?, + volume_24h_usd = ?, + market_cap_usd = ?, + available_supply = ?, + total_supply = ?, + max_supply = ?, + percent_change_1h = ?, + percent_change_24h = ?, + percent_change_7d = ?, + rank = ?, + last_updated = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- PRICE CONVERSION QUERIES (NEW) +-- name: InsertPriceConversion :one +INSERT INTO price_conversions ( + price_id, + currency_code, + price, + volume_24h, + market_cap, + last_updated +) VALUES (?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetPriceConversionByID :one +SELECT * FROM price_conversions +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetPriceConversionsByPriceID :many +SELECT * FROM price_conversions +WHERE price_id = ? AND deleted_at IS NULL +ORDER BY currency_code ASC; + +-- name: GetPriceConversionByCurrency :one +SELECT * FROM price_conversions +WHERE price_id = ? AND currency_code = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: UpdatePriceConversion :one +UPDATE price_conversions +SET + price = ?, + volume_24h = ?, + market_cap = ?, + last_updated = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeletePriceConversion :exec +UPDATE price_conversions +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- BLOCKCHAIN QUERIES +-- name: InsertBlockchain :one +INSERT INTO blockchains ( + id, + chain_name, + chain_id_cosmos, + chain_id_evm, + api_name, + bech_account_prefix, + bech_validator_prefix, + main_asset_symbol, + main_asset_denom, + staking_asset_symbol, + staking_asset_denom, + is_stake_enabled, + chain_image, + main_asset_image, + staking_asset_image, + chain_type, + is_support_mobile_wallet, + is_support_extension_wallet, + is_support_erc20, + description_en, + description_ko, + description_ja, + origin_genesis_time, + account_type, + btc_staking, + cosmos_fee_info, + evm_fee_info, + lcd_endpoint, + grpc_endpoint, + evm_rpc_endpoint, + explorer, + about, + forum +) VALUES ( + ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? 
+) +RETURNING *; + +-- name: GetBlockchainByID :one +SELECT * FROM blockchains +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetBlockchainByChainName :one +SELECT * FROM blockchains +WHERE chain_name = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetBlockchainByCosmosChainID :one +SELECT * FROM blockchains +WHERE chain_id_cosmos = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetBlockchainByEvmChainID :one +SELECT * FROM blockchains +WHERE chain_id_evm = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: ListAllBlockchains :many +SELECT * FROM blockchains +WHERE deleted_at IS NULL +ORDER BY chain_name ASC; + +-- name: ListBlockchainsByChainType :many +SELECT * FROM blockchains +WHERE chain_type LIKE '%' || ? || '%' AND deleted_at IS NULL +ORDER BY chain_name ASC; + +-- name: ListBlockchainsWithStaking :many +SELECT * FROM blockchains +WHERE is_stake_enabled = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC; + +-- name: ListBlockchainsWithMobileSupport :many +SELECT * FROM blockchains +WHERE is_support_mobile_wallet = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC; + +-- name: ListBlockchainsWithExtensionSupport :many +SELECT * FROM blockchains +WHERE is_support_extension_wallet = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC; + +-- name: ListBlockchainsWithERC20Support :many +SELECT * FROM blockchains +WHERE is_support_erc20 = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC; + +-- name: UpdateBlockchain :one +UPDATE blockchains +SET + chain_name = ?, + chain_id_cosmos = ?, + chain_id_evm = ?, + api_name = ?, + bech_account_prefix = ?, + bech_validator_prefix = ?, + main_asset_symbol = ?, + main_asset_denom = ?, + staking_asset_symbol = ?, + staking_asset_denom = ?, + is_stake_enabled = ?, + chain_image = ?, + main_asset_image = ?, + staking_asset_image = ?, + chain_type = ?, + is_support_mobile_wallet = ?, + is_support_extension_wallet = ?, + is_support_erc20 = ?, + description_en = ?, + description_ko = ?, + description_ja = ?, + origin_genesis_time = ?, + account_type = ?, + btc_staking = ?, + cosmos_fee_info = ?, + evm_fee_info = ?, + lcd_endpoint = ?, + grpc_endpoint = ?, + evm_rpc_endpoint = ?, + explorer = ?, + about = ?, + forum = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: UpdateBlockchainEndpoints :one +UPDATE blockchains +SET + lcd_endpoint = ?, + grpc_endpoint = ?, + evm_rpc_endpoint = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: UpdateBlockchainExplorer :one +UPDATE blockchains +SET + explorer = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: UpdateBlockchainFeeInfo :one +UPDATE blockchains +SET + cosmos_fee_info = ?, + evm_fee_info = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: UpdateBlockchainImages :one +UPDATE blockchains +SET + chain_image = ?, + main_asset_image = ?, + staking_asset_image = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: UpdateBlockchainDescriptions :one +UPDATE blockchains +SET + description_en = ?, + description_ko = ?, + description_ja = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: UpdateBlockchainSocialLinks :one +UPDATE blockchains +SET + about = ?, + forum = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteBlockchain :exec +UPDATE blockchains +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- name: GetBlockchainWithAssetInfo :one +SELECT b.*, a.id as asset_id, a.symbol, a.decimals, p.price_usd, p.price_btc +FROM blockchains b +LEFT JOIN assets a ON b.main_asset_symbol = a.symbol +LEFT JOIN ( + SELECT p1.* + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE b.id = ? AND b.deleted_at IS NULL +LIMIT 1; + +-- name: ListBlockchainsWithAssetInfo :many +SELECT b.*, a.id as asset_id, a.symbol, a.decimals, p.price_usd, p.price_btc +FROM blockchains b +LEFT JOIN assets a ON b.main_asset_symbol = a.symbol +LEFT JOIN ( + SELECT p1.* + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE b.deleted_at IS NULL +ORDER BY b.chain_name ASC +LIMIT ? OFFSET ?; + +-- name: SearchBlockchains :many +SELECT * FROM blockchains +WHERE ( + chain_name LIKE '%' || ? || '%' OR + main_asset_symbol LIKE '%' || ? || '%' OR + staking_asset_symbol LIKE '%' || ? || '%' OR + description_en LIKE '%' || ? || '%' +) AND deleted_at IS NULL +ORDER BY chain_name ASC +LIMIT ? OFFSET ?; + +-- name: CountBlockchainsByChainType :one +SELECT COUNT(*) as count FROM blockchains +WHERE chain_type LIKE '%' || ? || '%' AND deleted_at IS NULL; + +-- name: GetBlockchainEndpoints :one +SELECT id, chain_name, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint +FROM blockchains +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetBlockchainExplorer :one +SELECT id, chain_name, explorer +FROM blockchains +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; diff --git a/internal/db/network/query.sql.go b/internal/db/network/query.sql.go new file mode 100644 index 0000000..ae29548 --- /dev/null +++ b/internal/db/network/query.sql.go @@ -0,0 +1,2583 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 +// source: query.sql + +package network + +import ( + "context" + "database/sql" + "time" +) + +const countBlockchainsByChainType = `-- name: CountBlockchainsByChainType :one +SELECT COUNT(*) as count FROM blockchains +WHERE chain_type LIKE '%' || ? || '%' AND deleted_at IS NULL +` + +func (q *Queries) CountBlockchainsByChainType(ctx context.Context, dollar_1 sql.NullString) (int64, error) { + row := q.db.QueryRowContext(ctx, countBlockchainsByChainType, dollar_1) + var count int64 + err := row.Scan(&count) + return count, err +} + +const getAssetByChainAndSymbol = `-- name: GetAssetByChainAndSymbol :one +SELECT id, created_at, updated_at, deleted_at, name, symbol, decimals, chain_id, channel, asset_type, coingecko_id FROM assets +WHERE chain_id = ? AND symbol = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +type GetAssetByChainAndSymbolParams struct { + ChainID string `json:"chain_id"` + Symbol string `json:"symbol"` +} + +func (q *Queries) GetAssetByChainAndSymbol(ctx context.Context, arg GetAssetByChainAndSymbolParams) (Asset, error) { + row := q.db.QueryRowContext(ctx, getAssetByChainAndSymbol, arg.ChainID, arg.Symbol) + var i Asset + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + ) + return i, err +} + +const getAssetByID = `-- name: GetAssetByID :one +SELECT id, created_at, updated_at, deleted_at, name, symbol, decimals, chain_id, channel, asset_type, coingecko_id FROM assets +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAssetByID(ctx context.Context, id string) (Asset, error) { + row := q.db.QueryRowContext(ctx, getAssetByID, id) + var i Asset + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + ) + return i, err +} + +const getAssetBySymbol = `-- name: GetAssetBySymbol :one +SELECT id, created_at, updated_at, deleted_at, name, symbol, decimals, chain_id, channel, asset_type, coingecko_id FROM assets +WHERE symbol = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAssetBySymbol(ctx context.Context, symbol string) (Asset, error) { + row := q.db.QueryRowContext(ctx, getAssetBySymbol, symbol) + var i Asset + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + ) + return i, err +} + +const getAssetWithLatestPrice = `-- name: GetAssetWithLatestPrice :one +SELECT a.id, a.created_at, a.updated_at, a.deleted_at, a.name, a.symbol, a.decimals, a.chain_id, a.channel, a.asset_type, a.coingecko_id, p.price_usd, p.price_btc, p.volume_24h_usd, p.market_cap_usd, + p.available_supply, p.total_supply, p.max_supply, + p.percent_change_1h, p.percent_change_24h, p.percent_change_7d, + p.rank, p.last_updated +FROM assets a +LEFT JOIN ( + SELECT p1.id, p1.created_at, p1.updated_at, p1.deleted_at, p1.asset_id, p1.price_usd, p1.price_btc, p1.volume_24h_usd, p1.market_cap_usd, p1.available_supply, p1.total_supply, p1.max_supply, p1.percent_change_1h, p1.percent_change_24h, p1.percent_change_7d, p1.rank, p1.last_updated + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE a.id = ? 
AND a.deleted_at IS NULL +LIMIT 1 +` + +type GetAssetWithLatestPriceRow struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Name string `json:"name"` + Symbol string `json:"symbol"` + Decimals int64 `json:"decimals"` + ChainID string `json:"chain_id"` + Channel string `json:"channel"` + AssetType string `json:"asset_type"` + CoingeckoID sql.NullString `json:"coingecko_id"` + PriceUsd sql.NullFloat64 `json:"price_usd"` + PriceBtc sql.NullFloat64 `json:"price_btc"` + Volume24hUsd sql.NullFloat64 `json:"volume_24h_usd"` + MarketCapUsd sql.NullFloat64 `json:"market_cap_usd"` + AvailableSupply sql.NullFloat64 `json:"available_supply"` + TotalSupply sql.NullFloat64 `json:"total_supply"` + MaxSupply sql.NullFloat64 `json:"max_supply"` + PercentChange1h sql.NullFloat64 `json:"percent_change_1h"` + PercentChange24h sql.NullFloat64 `json:"percent_change_24h"` + PercentChange7d sql.NullFloat64 `json:"percent_change_7d"` + Rank sql.NullInt64 `json:"rank"` + LastUpdated time.Time `json:"last_updated"` +} + +func (q *Queries) GetAssetWithLatestPrice(ctx context.Context, id string) (GetAssetWithLatestPriceRow, error) { + row := q.db.QueryRowContext(ctx, getAssetWithLatestPrice, id) + var i GetAssetWithLatestPriceRow + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + &i.PriceUsd, + &i.PriceBtc, + &i.Volume24hUsd, + &i.MarketCapUsd, + &i.AvailableSupply, + &i.TotalSupply, + &i.MaxSupply, + &i.PercentChange1h, + &i.PercentChange24h, + &i.PercentChange7d, + &i.Rank, + &i.LastUpdated, + ) + return i, err +} + +const getBlockchainByChainName = `-- name: GetBlockchainByChainName :one +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE chain_name = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetBlockchainByChainName(ctx context.Context, chainName string) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, getBlockchainByChainName, chainName) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const getBlockchainByCosmosChainID = `-- name: GetBlockchainByCosmosChainID :one +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE chain_id_cosmos = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetBlockchainByCosmosChainID(ctx context.Context, chainIDCosmos sql.NullString) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, getBlockchainByCosmosChainID, chainIDCosmos) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const getBlockchainByEvmChainID = `-- name: GetBlockchainByEvmChainID :one +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE chain_id_evm = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetBlockchainByEvmChainID(ctx context.Context, chainIDEvm sql.NullString) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, getBlockchainByEvmChainID, chainIDEvm) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const getBlockchainByID = `-- name: GetBlockchainByID :one +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetBlockchainByID(ctx context.Context, id string) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, getBlockchainByID, id) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const getBlockchainEndpoints = `-- name: GetBlockchainEndpoints :one +SELECT id, chain_name, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint +FROM blockchains +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +type GetBlockchainEndpointsRow struct { + ID string `json:"id"` + ChainName string `json:"chain_name"` + LcdEndpoint sql.NullString `json:"lcd_endpoint"` + GrpcEndpoint sql.NullString `json:"grpc_endpoint"` + EvmRpcEndpoint sql.NullString `json:"evm_rpc_endpoint"` +} + +func (q *Queries) GetBlockchainEndpoints(ctx context.Context, id string) (GetBlockchainEndpointsRow, error) { + row := q.db.QueryRowContext(ctx, getBlockchainEndpoints, id) + var i GetBlockchainEndpointsRow + err := row.Scan( + &i.ID, + &i.ChainName, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + ) + return i, err +} + +const getBlockchainExplorer = `-- name: GetBlockchainExplorer :one +SELECT id, chain_name, explorer +FROM blockchains +WHERE id = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +type GetBlockchainExplorerRow struct { + ID string `json:"id"` + ChainName string `json:"chain_name"` + Explorer sql.NullString `json:"explorer"` +} + +func (q *Queries) GetBlockchainExplorer(ctx context.Context, id string) (GetBlockchainExplorerRow, error) { + row := q.db.QueryRowContext(ctx, getBlockchainExplorer, id) + var i GetBlockchainExplorerRow + err := row.Scan(&i.ID, &i.ChainName, &i.Explorer) + return i, err +} + +const getBlockchainWithAssetInfo = `-- name: GetBlockchainWithAssetInfo :one +SELECT b.id, b.created_at, b.updated_at, b.deleted_at, b.chain_name, b.chain_id_cosmos, b.chain_id_evm, b.api_name, b.bech_account_prefix, b.bech_validator_prefix, b.main_asset_symbol, b.main_asset_denom, b.staking_asset_symbol, b.staking_asset_denom, b.is_stake_enabled, b.chain_image, b.main_asset_image, b.staking_asset_image, b.chain_type, b.is_support_mobile_wallet, b.is_support_extension_wallet, b.is_support_erc20, b.description_en, b.description_ko, b.description_ja, b.origin_genesis_time, b.account_type, b.btc_staking, b.cosmos_fee_info, b.evm_fee_info, b.lcd_endpoint, b.grpc_endpoint, b.evm_rpc_endpoint, b.explorer, b.about, b.forum, a.id as asset_id, a.symbol, a.decimals, p.price_usd, p.price_btc +FROM blockchains b +LEFT JOIN assets a ON b.main_asset_symbol = a.symbol +LEFT JOIN ( + SELECT p1.id, p1.created_at, p1.updated_at, p1.deleted_at, p1.asset_id, p1.price_usd, p1.price_btc, p1.volume_24h_usd, p1.market_cap_usd, p1.available_supply, p1.total_supply, p1.max_supply, p1.percent_change_1h, p1.percent_change_24h, p1.percent_change_7d, p1.rank, p1.last_updated + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE b.id = ? 
AND b.deleted_at IS NULL +LIMIT 1 +` + +type GetBlockchainWithAssetInfoRow struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + ChainName string `json:"chain_name"` + ChainIDCosmos sql.NullString `json:"chain_id_cosmos"` + ChainIDEvm sql.NullString `json:"chain_id_evm"` + ApiName sql.NullString `json:"api_name"` + BechAccountPrefix sql.NullString `json:"bech_account_prefix"` + BechValidatorPrefix sql.NullString `json:"bech_validator_prefix"` + MainAssetSymbol sql.NullString `json:"main_asset_symbol"` + MainAssetDenom sql.NullString `json:"main_asset_denom"` + StakingAssetSymbol sql.NullString `json:"staking_asset_symbol"` + StakingAssetDenom sql.NullString `json:"staking_asset_denom"` + IsStakeEnabled bool `json:"is_stake_enabled"` + ChainImage sql.NullString `json:"chain_image"` + MainAssetImage sql.NullString `json:"main_asset_image"` + StakingAssetImage sql.NullString `json:"staking_asset_image"` + ChainType string `json:"chain_type"` + IsSupportMobileWallet bool `json:"is_support_mobile_wallet"` + IsSupportExtensionWallet bool `json:"is_support_extension_wallet"` + IsSupportErc20 bool `json:"is_support_erc20"` + DescriptionEn sql.NullString `json:"description_en"` + DescriptionKo sql.NullString `json:"description_ko"` + DescriptionJa sql.NullString `json:"description_ja"` + OriginGenesisTime sql.NullTime `json:"origin_genesis_time"` + AccountType string `json:"account_type"` + BtcStaking sql.NullString `json:"btc_staking"` + CosmosFeeInfo sql.NullString `json:"cosmos_fee_info"` + EvmFeeInfo sql.NullString `json:"evm_fee_info"` + LcdEndpoint sql.NullString `json:"lcd_endpoint"` + GrpcEndpoint sql.NullString `json:"grpc_endpoint"` + EvmRpcEndpoint sql.NullString `json:"evm_rpc_endpoint"` + Explorer sql.NullString `json:"explorer"` + About sql.NullString `json:"about"` + Forum sql.NullString `json:"forum"` + AssetID sql.NullString `json:"asset_id"` + Symbol sql.NullString `json:"symbol"` + Decimals sql.NullInt64 `json:"decimals"` + PriceUsd sql.NullFloat64 `json:"price_usd"` + PriceBtc sql.NullFloat64 `json:"price_btc"` +} + +func (q *Queries) GetBlockchainWithAssetInfo(ctx context.Context, id string) (GetBlockchainWithAssetInfoRow, error) { + row := q.db.QueryRowContext(ctx, getBlockchainWithAssetInfo, id) + var i GetBlockchainWithAssetInfoRow + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + &i.AssetID, + &i.Symbol, + &i.Decimals, + &i.PriceUsd, + &i.PriceBtc, + ) + return i, err +} + +const getPriceByAssetID = `-- name: GetPriceByAssetID :one +SELECT id, created_at, updated_at, deleted_at, asset_id, price_usd, price_btc, volume_24h_usd, market_cap_usd, available_supply, total_supply, max_supply, percent_change_1h, percent_change_24h, percent_change_7d, rank, last_updated FROM prices +WHERE asset_id = ? 
AND deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT 1 +` + +func (q *Queries) GetPriceByAssetID(ctx context.Context, assetID string) (Price, error) { + row := q.db.QueryRowContext(ctx, getPriceByAssetID, assetID) + var i Price + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AssetID, + &i.PriceUsd, + &i.PriceBtc, + &i.Volume24hUsd, + &i.MarketCapUsd, + &i.AvailableSupply, + &i.TotalSupply, + &i.MaxSupply, + &i.PercentChange1h, + &i.PercentChange24h, + &i.PercentChange7d, + &i.Rank, + &i.LastUpdated, + ) + return i, err +} + +const getPriceByID = `-- name: GetPriceByID :one +SELECT id, created_at, updated_at, deleted_at, asset_id, price_usd, price_btc, volume_24h_usd, market_cap_usd, available_supply, total_supply, max_supply, percent_change_1h, percent_change_24h, percent_change_7d, rank, last_updated FROM prices +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetPriceByID(ctx context.Context, id string) (Price, error) { + row := q.db.QueryRowContext(ctx, getPriceByID, id) + var i Price + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AssetID, + &i.PriceUsd, + &i.PriceBtc, + &i.Volume24hUsd, + &i.MarketCapUsd, + &i.AvailableSupply, + &i.TotalSupply, + &i.MaxSupply, + &i.PercentChange1h, + &i.PercentChange24h, + &i.PercentChange7d, + &i.Rank, + &i.LastUpdated, + ) + return i, err +} + +const getPriceConversionByCurrency = `-- name: GetPriceConversionByCurrency :one +SELECT id, created_at, updated_at, deleted_at, price_id, currency_code, price, volume_24h, market_cap, last_updated FROM price_conversions +WHERE price_id = ? AND currency_code = ? AND deleted_at IS NULL +LIMIT 1 +` + +type GetPriceConversionByCurrencyParams struct { + PriceID string `json:"price_id"` + CurrencyCode string `json:"currency_code"` +} + +func (q *Queries) GetPriceConversionByCurrency(ctx context.Context, arg GetPriceConversionByCurrencyParams) (PriceConversion, error) { + row := q.db.QueryRowContext(ctx, getPriceConversionByCurrency, arg.PriceID, arg.CurrencyCode) + var i PriceConversion + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.PriceID, + &i.CurrencyCode, + &i.Price, + &i.Volume24h, + &i.MarketCap, + &i.LastUpdated, + ) + return i, err +} + +const getPriceConversionByID = `-- name: GetPriceConversionByID :one +SELECT id, created_at, updated_at, deleted_at, price_id, currency_code, price, volume_24h, market_cap, last_updated FROM price_conversions +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetPriceConversionByID(ctx context.Context, id string) (PriceConversion, error) { + row := q.db.QueryRowContext(ctx, getPriceConversionByID, id) + var i PriceConversion + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.PriceID, + &i.CurrencyCode, + &i.Price, + &i.Volume24h, + &i.MarketCap, + &i.LastUpdated, + ) + return i, err +} + +const getPriceConversionsByPriceID = `-- name: GetPriceConversionsByPriceID :many +SELECT id, created_at, updated_at, deleted_at, price_id, currency_code, price, volume_24h, market_cap, last_updated FROM price_conversions +WHERE price_id = ? 
AND deleted_at IS NULL +ORDER BY currency_code ASC +` + +func (q *Queries) GetPriceConversionsByPriceID(ctx context.Context, priceID string) ([]PriceConversion, error) { + rows, err := q.db.QueryContext(ctx, getPriceConversionsByPriceID, priceID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []PriceConversion + for rows.Next() { + var i PriceConversion + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.PriceID, + &i.CurrencyCode, + &i.Price, + &i.Volume24h, + &i.MarketCap, + &i.LastUpdated, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const insertAsset = `-- name: InsertAsset :one +INSERT INTO assets ( + name, + symbol, + decimals, + chain_id, + channel, + asset_type, + coingecko_id +) VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING id, created_at, updated_at, deleted_at, name, symbol, decimals, chain_id, channel, asset_type, coingecko_id +` + +type InsertAssetParams struct { + Name string `json:"name"` + Symbol string `json:"symbol"` + Decimals int64 `json:"decimals"` + ChainID string `json:"chain_id"` + Channel string `json:"channel"` + AssetType string `json:"asset_type"` + CoingeckoID sql.NullString `json:"coingecko_id"` +} + +// ASSET QUERIES +func (q *Queries) InsertAsset(ctx context.Context, arg InsertAssetParams) (Asset, error) { + row := q.db.QueryRowContext(ctx, insertAsset, + arg.Name, + arg.Symbol, + arg.Decimals, + arg.ChainID, + arg.Channel, + arg.AssetType, + arg.CoingeckoID, + ) + var i Asset + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + ) + return i, err +} + +const insertBlockchain = `-- name: InsertBlockchain :one +INSERT INTO blockchains ( + id, + chain_name, + chain_id_cosmos, + chain_id_evm, + api_name, + bech_account_prefix, + bech_validator_prefix, + main_asset_symbol, + main_asset_denom, + staking_asset_symbol, + staking_asset_denom, + is_stake_enabled, + chain_image, + main_asset_image, + staking_asset_image, + chain_type, + is_support_mobile_wallet, + is_support_extension_wallet, + is_support_erc20, + description_en, + description_ko, + description_ja, + origin_genesis_time, + account_type, + btc_staking, + cosmos_fee_info, + evm_fee_info, + lcd_endpoint, + grpc_endpoint, + evm_rpc_endpoint, + explorer, + about, + forum +) VALUES ( + ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? 
+) +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type InsertBlockchainParams struct { + ID string `json:"id"` + ChainName string `json:"chain_name"` + ChainIDCosmos sql.NullString `json:"chain_id_cosmos"` + ChainIDEvm sql.NullString `json:"chain_id_evm"` + ApiName sql.NullString `json:"api_name"` + BechAccountPrefix sql.NullString `json:"bech_account_prefix"` + BechValidatorPrefix sql.NullString `json:"bech_validator_prefix"` + MainAssetSymbol sql.NullString `json:"main_asset_symbol"` + MainAssetDenom sql.NullString `json:"main_asset_denom"` + StakingAssetSymbol sql.NullString `json:"staking_asset_symbol"` + StakingAssetDenom sql.NullString `json:"staking_asset_denom"` + IsStakeEnabled bool `json:"is_stake_enabled"` + ChainImage sql.NullString `json:"chain_image"` + MainAssetImage sql.NullString `json:"main_asset_image"` + StakingAssetImage sql.NullString `json:"staking_asset_image"` + ChainType string `json:"chain_type"` + IsSupportMobileWallet bool `json:"is_support_mobile_wallet"` + IsSupportExtensionWallet bool `json:"is_support_extension_wallet"` + IsSupportErc20 bool `json:"is_support_erc20"` + DescriptionEn sql.NullString `json:"description_en"` + DescriptionKo sql.NullString `json:"description_ko"` + DescriptionJa sql.NullString `json:"description_ja"` + OriginGenesisTime sql.NullTime `json:"origin_genesis_time"` + AccountType string `json:"account_type"` + BtcStaking sql.NullString `json:"btc_staking"` + CosmosFeeInfo sql.NullString `json:"cosmos_fee_info"` + EvmFeeInfo sql.NullString `json:"evm_fee_info"` + LcdEndpoint sql.NullString `json:"lcd_endpoint"` + GrpcEndpoint sql.NullString `json:"grpc_endpoint"` + EvmRpcEndpoint sql.NullString `json:"evm_rpc_endpoint"` + Explorer sql.NullString `json:"explorer"` + About sql.NullString `json:"about"` + Forum sql.NullString `json:"forum"` +} + +// BLOCKCHAIN QUERIES +func (q *Queries) InsertBlockchain(ctx context.Context, arg InsertBlockchainParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, insertBlockchain, + arg.ID, + arg.ChainName, + arg.ChainIDCosmos, + arg.ChainIDEvm, + arg.ApiName, + arg.BechAccountPrefix, + arg.BechValidatorPrefix, + arg.MainAssetSymbol, + arg.MainAssetDenom, + arg.StakingAssetSymbol, + arg.StakingAssetDenom, + arg.IsStakeEnabled, + arg.ChainImage, + arg.MainAssetImage, + arg.StakingAssetImage, + arg.ChainType, + arg.IsSupportMobileWallet, + arg.IsSupportExtensionWallet, + arg.IsSupportErc20, + arg.DescriptionEn, + arg.DescriptionKo, + arg.DescriptionJa, + arg.OriginGenesisTime, + arg.AccountType, + arg.BtcStaking, + arg.CosmosFeeInfo, + arg.EvmFeeInfo, + arg.LcdEndpoint, + arg.GrpcEndpoint, + arg.EvmRpcEndpoint, + arg.Explorer, + arg.About, + arg.Forum, + ) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + 
&i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const insertPrice = `-- name: InsertPrice :one +INSERT INTO prices ( + asset_id, + price_usd, + price_btc, + volume_24h_usd, + market_cap_usd, + available_supply, + total_supply, + max_supply, + percent_change_1h, + percent_change_24h, + percent_change_7d, + rank, + last_updated +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING id, created_at, updated_at, deleted_at, asset_id, price_usd, price_btc, volume_24h_usd, market_cap_usd, available_supply, total_supply, max_supply, percent_change_1h, percent_change_24h, percent_change_7d, rank, last_updated +` + +type InsertPriceParams struct { + AssetID string `json:"asset_id"` + PriceUsd sql.NullFloat64 `json:"price_usd"` + PriceBtc sql.NullFloat64 `json:"price_btc"` + Volume24hUsd sql.NullFloat64 `json:"volume_24h_usd"` + MarketCapUsd sql.NullFloat64 `json:"market_cap_usd"` + AvailableSupply sql.NullFloat64 `json:"available_supply"` + TotalSupply sql.NullFloat64 `json:"total_supply"` + MaxSupply sql.NullFloat64 `json:"max_supply"` + PercentChange1h sql.NullFloat64 `json:"percent_change_1h"` + PercentChange24h sql.NullFloat64 `json:"percent_change_24h"` + PercentChange7d sql.NullFloat64 `json:"percent_change_7d"` + Rank sql.NullInt64 `json:"rank"` + LastUpdated time.Time `json:"last_updated"` +} + +// PRICE QUERIES (UPDATED) +func (q *Queries) InsertPrice(ctx context.Context, arg InsertPriceParams) (Price, error) { + row := q.db.QueryRowContext(ctx, insertPrice, + arg.AssetID, + arg.PriceUsd, + arg.PriceBtc, + arg.Volume24hUsd, + arg.MarketCapUsd, + arg.AvailableSupply, + arg.TotalSupply, + arg.MaxSupply, + arg.PercentChange1h, + arg.PercentChange24h, + arg.PercentChange7d, + arg.Rank, + arg.LastUpdated, + ) + var i Price + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AssetID, + &i.PriceUsd, + &i.PriceBtc, + &i.Volume24hUsd, + &i.MarketCapUsd, + &i.AvailableSupply, + &i.TotalSupply, + &i.MaxSupply, + &i.PercentChange1h, + &i.PercentChange24h, + &i.PercentChange7d, + &i.Rank, + &i.LastUpdated, + ) + return i, err +} + +const insertPriceConversion = `-- name: InsertPriceConversion :one +INSERT INTO price_conversions ( + price_id, + currency_code, + price, + volume_24h, + market_cap, + last_updated +) VALUES (?, ?, ?, ?, ?, ?) 
+RETURNING id, created_at, updated_at, deleted_at, price_id, currency_code, price, volume_24h, market_cap, last_updated +` + +type InsertPriceConversionParams struct { + PriceID string `json:"price_id"` + CurrencyCode string `json:"currency_code"` + Price sql.NullFloat64 `json:"price"` + Volume24h sql.NullFloat64 `json:"volume_24h"` + MarketCap sql.NullFloat64 `json:"market_cap"` + LastUpdated time.Time `json:"last_updated"` +} + +// PRICE CONVERSION QUERIES (NEW) +func (q *Queries) InsertPriceConversion(ctx context.Context, arg InsertPriceConversionParams) (PriceConversion, error) { + row := q.db.QueryRowContext(ctx, insertPriceConversion, + arg.PriceID, + arg.CurrencyCode, + arg.Price, + arg.Volume24h, + arg.MarketCap, + arg.LastUpdated, + ) + var i PriceConversion + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.PriceID, + &i.CurrencyCode, + &i.Price, + &i.Volume24h, + &i.MarketCap, + &i.LastUpdated, + ) + return i, err +} + +const listAllBlockchains = `-- name: ListAllBlockchains :many +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE deleted_at IS NULL +ORDER BY chain_name ASC +` + +func (q *Queries) ListAllBlockchains(ctx context.Context) ([]Blockchain, error) { + rows, err := q.db.QueryContext(ctx, listAllBlockchains) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Blockchain + for rows.Next() { + var i Blockchain + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listAssetsByChain = `-- name: ListAssetsByChain :many +SELECT id, created_at, updated_at, deleted_at, name, symbol, decimals, chain_id, channel, asset_type, coingecko_id FROM assets +WHERE chain_id = ? 
AND deleted_at IS NULL +ORDER BY symbol ASC +` + +func (q *Queries) ListAssetsByChain(ctx context.Context, chainID string) ([]Asset, error) { + rows, err := q.db.QueryContext(ctx, listAssetsByChain, chainID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Asset + for rows.Next() { + var i Asset + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listAssetsWithLatestPrices = `-- name: ListAssetsWithLatestPrices :many +SELECT a.id, a.created_at, a.updated_at, a.deleted_at, a.name, a.symbol, a.decimals, a.chain_id, a.channel, a.asset_type, a.coingecko_id, p.price_usd, p.price_btc, p.volume_24h_usd, p.market_cap_usd, + p.available_supply, p.total_supply, p.max_supply, + p.percent_change_1h, p.percent_change_24h, p.percent_change_7d, + p.rank, p.last_updated +FROM assets a +LEFT JOIN ( + SELECT p1.id, p1.created_at, p1.updated_at, p1.deleted_at, p1.asset_id, p1.price_usd, p1.price_btc, p1.volume_24h_usd, p1.market_cap_usd, p1.available_supply, p1.total_supply, p1.max_supply, p1.percent_change_1h, p1.percent_change_24h, p1.percent_change_7d, p1.rank, p1.last_updated + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE a.deleted_at IS NULL +ORDER BY p.rank ASC, a.symbol ASC +LIMIT ? OFFSET ? 
+` + +type ListAssetsWithLatestPricesParams struct { + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +type ListAssetsWithLatestPricesRow struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Name string `json:"name"` + Symbol string `json:"symbol"` + Decimals int64 `json:"decimals"` + ChainID string `json:"chain_id"` + Channel string `json:"channel"` + AssetType string `json:"asset_type"` + CoingeckoID sql.NullString `json:"coingecko_id"` + PriceUsd sql.NullFloat64 `json:"price_usd"` + PriceBtc sql.NullFloat64 `json:"price_btc"` + Volume24hUsd sql.NullFloat64 `json:"volume_24h_usd"` + MarketCapUsd sql.NullFloat64 `json:"market_cap_usd"` + AvailableSupply sql.NullFloat64 `json:"available_supply"` + TotalSupply sql.NullFloat64 `json:"total_supply"` + MaxSupply sql.NullFloat64 `json:"max_supply"` + PercentChange1h sql.NullFloat64 `json:"percent_change_1h"` + PercentChange24h sql.NullFloat64 `json:"percent_change_24h"` + PercentChange7d sql.NullFloat64 `json:"percent_change_7d"` + Rank sql.NullInt64 `json:"rank"` + LastUpdated time.Time `json:"last_updated"` +} + +func (q *Queries) ListAssetsWithLatestPrices(ctx context.Context, arg ListAssetsWithLatestPricesParams) ([]ListAssetsWithLatestPricesRow, error) { + rows, err := q.db.QueryContext(ctx, listAssetsWithLatestPrices, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListAssetsWithLatestPricesRow + for rows.Next() { + var i ListAssetsWithLatestPricesRow + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + &i.PriceUsd, + &i.PriceBtc, + &i.Volume24hUsd, + &i.MarketCapUsd, + &i.AvailableSupply, + &i.TotalSupply, + &i.MaxSupply, + &i.PercentChange1h, + &i.PercentChange24h, + &i.PercentChange7d, + &i.Rank, + &i.LastUpdated, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listBlockchainsByChainType = `-- name: ListBlockchainsByChainType :many +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE chain_type LIKE '%' || ? 
|| '%' AND deleted_at IS NULL +ORDER BY chain_name ASC +` + +func (q *Queries) ListBlockchainsByChainType(ctx context.Context, dollar_1 sql.NullString) ([]Blockchain, error) { + rows, err := q.db.QueryContext(ctx, listBlockchainsByChainType, dollar_1) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Blockchain + for rows.Next() { + var i Blockchain + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listBlockchainsWithAssetInfo = `-- name: ListBlockchainsWithAssetInfo :many +SELECT b.id, b.created_at, b.updated_at, b.deleted_at, b.chain_name, b.chain_id_cosmos, b.chain_id_evm, b.api_name, b.bech_account_prefix, b.bech_validator_prefix, b.main_asset_symbol, b.main_asset_denom, b.staking_asset_symbol, b.staking_asset_denom, b.is_stake_enabled, b.chain_image, b.main_asset_image, b.staking_asset_image, b.chain_type, b.is_support_mobile_wallet, b.is_support_extension_wallet, b.is_support_erc20, b.description_en, b.description_ko, b.description_ja, b.origin_genesis_time, b.account_type, b.btc_staking, b.cosmos_fee_info, b.evm_fee_info, b.lcd_endpoint, b.grpc_endpoint, b.evm_rpc_endpoint, b.explorer, b.about, b.forum, a.id as asset_id, a.symbol, a.decimals, p.price_usd, p.price_btc +FROM blockchains b +LEFT JOIN assets a ON b.main_asset_symbol = a.symbol +LEFT JOIN ( + SELECT p1.id, p1.created_at, p1.updated_at, p1.deleted_at, p1.asset_id, p1.price_usd, p1.price_btc, p1.volume_24h_usd, p1.market_cap_usd, p1.available_supply, p1.total_supply, p1.max_supply, p1.percent_change_1h, p1.percent_change_24h, p1.percent_change_7d, p1.rank, p1.last_updated + FROM prices p1 + INNER JOIN ( + SELECT asset_id, MAX(last_updated) as max_date + FROM prices + WHERE deleted_at IS NULL + GROUP BY asset_id + ) p2 ON p1.asset_id = p2.asset_id AND p1.last_updated = p2.max_date + WHERE p1.deleted_at IS NULL +) p ON a.id = p.asset_id +WHERE b.deleted_at IS NULL +ORDER BY b.chain_name ASC +LIMIT ? OFFSET ? 
+` + +type ListBlockchainsWithAssetInfoParams struct { + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +type ListBlockchainsWithAssetInfoRow struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + ChainName string `json:"chain_name"` + ChainIDCosmos sql.NullString `json:"chain_id_cosmos"` + ChainIDEvm sql.NullString `json:"chain_id_evm"` + ApiName sql.NullString `json:"api_name"` + BechAccountPrefix sql.NullString `json:"bech_account_prefix"` + BechValidatorPrefix sql.NullString `json:"bech_validator_prefix"` + MainAssetSymbol sql.NullString `json:"main_asset_symbol"` + MainAssetDenom sql.NullString `json:"main_asset_denom"` + StakingAssetSymbol sql.NullString `json:"staking_asset_symbol"` + StakingAssetDenom sql.NullString `json:"staking_asset_denom"` + IsStakeEnabled bool `json:"is_stake_enabled"` + ChainImage sql.NullString `json:"chain_image"` + MainAssetImage sql.NullString `json:"main_asset_image"` + StakingAssetImage sql.NullString `json:"staking_asset_image"` + ChainType string `json:"chain_type"` + IsSupportMobileWallet bool `json:"is_support_mobile_wallet"` + IsSupportExtensionWallet bool `json:"is_support_extension_wallet"` + IsSupportErc20 bool `json:"is_support_erc20"` + DescriptionEn sql.NullString `json:"description_en"` + DescriptionKo sql.NullString `json:"description_ko"` + DescriptionJa sql.NullString `json:"description_ja"` + OriginGenesisTime sql.NullTime `json:"origin_genesis_time"` + AccountType string `json:"account_type"` + BtcStaking sql.NullString `json:"btc_staking"` + CosmosFeeInfo sql.NullString `json:"cosmos_fee_info"` + EvmFeeInfo sql.NullString `json:"evm_fee_info"` + LcdEndpoint sql.NullString `json:"lcd_endpoint"` + GrpcEndpoint sql.NullString `json:"grpc_endpoint"` + EvmRpcEndpoint sql.NullString `json:"evm_rpc_endpoint"` + Explorer sql.NullString `json:"explorer"` + About sql.NullString `json:"about"` + Forum sql.NullString `json:"forum"` + AssetID sql.NullString `json:"asset_id"` + Symbol sql.NullString `json:"symbol"` + Decimals sql.NullInt64 `json:"decimals"` + PriceUsd sql.NullFloat64 `json:"price_usd"` + PriceBtc sql.NullFloat64 `json:"price_btc"` +} + +func (q *Queries) ListBlockchainsWithAssetInfo(ctx context.Context, arg ListBlockchainsWithAssetInfoParams) ([]ListBlockchainsWithAssetInfoRow, error) { + rows, err := q.db.QueryContext(ctx, listBlockchainsWithAssetInfo, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListBlockchainsWithAssetInfoRow + for rows.Next() { + var i ListBlockchainsWithAssetInfoRow + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + &i.AssetID, + &i.Symbol, + &i.Decimals, + &i.PriceUsd, + &i.PriceBtc, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := 
rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listBlockchainsWithERC20Support = `-- name: ListBlockchainsWithERC20Support :many +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE is_support_erc20 = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC +` + +func (q *Queries) ListBlockchainsWithERC20Support(ctx context.Context) ([]Blockchain, error) { + rows, err := q.db.QueryContext(ctx, listBlockchainsWithERC20Support) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Blockchain + for rows.Next() { + var i Blockchain + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listBlockchainsWithExtensionSupport = `-- name: ListBlockchainsWithExtensionSupport :many +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE is_support_extension_wallet = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC +` + +func (q *Queries) ListBlockchainsWithExtensionSupport(ctx context.Context) ([]Blockchain, error) { + rows, err := q.db.QueryContext(ctx, listBlockchainsWithExtensionSupport) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Blockchain + for rows.Next() { + var i Blockchain + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + 
&i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listBlockchainsWithMobileSupport = `-- name: ListBlockchainsWithMobileSupport :many +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE is_support_mobile_wallet = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC +` + +func (q *Queries) ListBlockchainsWithMobileSupport(ctx context.Context) ([]Blockchain, error) { + rows, err := q.db.QueryContext(ctx, listBlockchainsWithMobileSupport) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Blockchain + for rows.Next() { + var i Blockchain + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listBlockchainsWithStaking = `-- name: ListBlockchainsWithStaking :many +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE is_stake_enabled = 1 AND deleted_at IS NULL +ORDER BY chain_name ASC +` + +func (q *Queries) ListBlockchainsWithStaking(ctx context.Context) ([]Blockchain, error) { + rows, err := q.db.QueryContext(ctx, listBlockchainsWithStaking) + if err != nil { + return 
nil, err + } + defer rows.Close() + var items []Blockchain + for rows.Next() { + var i Blockchain + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listPriceHistoryByAssetID = `-- name: ListPriceHistoryByAssetID :many +SELECT id, created_at, updated_at, deleted_at, asset_id, price_usd, price_btc, volume_24h_usd, market_cap_usd, available_supply, total_supply, max_supply, percent_change_1h, percent_change_24h, percent_change_7d, rank, last_updated FROM prices +WHERE asset_id = ? AND deleted_at IS NULL +ORDER BY last_updated DESC +LIMIT ? OFFSET ? +` + +type ListPriceHistoryByAssetIDParams struct { + AssetID string `json:"asset_id"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListPriceHistoryByAssetID(ctx context.Context, arg ListPriceHistoryByAssetIDParams) ([]Price, error) { + rows, err := q.db.QueryContext(ctx, listPriceHistoryByAssetID, arg.AssetID, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Price + for rows.Next() { + var i Price + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AssetID, + &i.PriceUsd, + &i.PriceBtc, + &i.Volume24hUsd, + &i.MarketCapUsd, + &i.AvailableSupply, + &i.TotalSupply, + &i.MaxSupply, + &i.PercentChange1h, + &i.PercentChange24h, + &i.PercentChange7d, + &i.Rank, + &i.LastUpdated, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchBlockchains = `-- name: SearchBlockchains :many +SELECT id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum FROM blockchains +WHERE ( + chain_name LIKE '%' || ? || '%' OR + main_asset_symbol LIKE '%' || ? || '%' OR + staking_asset_symbol LIKE '%' || ? || '%' OR + description_en LIKE '%' || ? || '%' +) AND deleted_at IS NULL +ORDER BY chain_name ASC +LIMIT ? OFFSET ? 
+` + +type SearchBlockchainsParams struct { + Column1 sql.NullString `json:"column_1"` + Column2 sql.NullString `json:"column_2"` + Column3 sql.NullString `json:"column_3"` + Column4 sql.NullString `json:"column_4"` + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) SearchBlockchains(ctx context.Context, arg SearchBlockchainsParams) ([]Blockchain, error) { + rows, err := q.db.QueryContext(ctx, searchBlockchains, + arg.Column1, + arg.Column2, + arg.Column3, + arg.Column4, + arg.Limit, + arg.Offset, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Blockchain + for rows.Next() { + var i Blockchain + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const softDeleteAsset = `-- name: SoftDeleteAsset :exec +UPDATE assets +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteAsset(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteAsset, id) + return err +} + +const softDeleteBlockchain = `-- name: SoftDeleteBlockchain :exec +UPDATE blockchains +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteBlockchain(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteBlockchain, id) + return err +} + +const softDeletePriceConversion = `-- name: SoftDeletePriceConversion :exec +UPDATE price_conversions +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeletePriceConversion(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeletePriceConversion, id) + return err +} + +const updateAsset = `-- name: UpdateAsset :one +UPDATE assets +SET + name = ?, + decimals = ?, + channel = ?, + asset_type = ?, + coingecko_id = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, name, symbol, decimals, chain_id, channel, asset_type, coingecko_id +` + +type UpdateAssetParams struct { + Name string `json:"name"` + Decimals int64 `json:"decimals"` + Channel string `json:"channel"` + AssetType string `json:"asset_type"` + CoingeckoID sql.NullString `json:"coingecko_id"` + ID string `json:"id"` +} + +func (q *Queries) UpdateAsset(ctx context.Context, arg UpdateAssetParams) (Asset, error) { + row := q.db.QueryRowContext(ctx, updateAsset, + arg.Name, + arg.Decimals, + arg.Channel, + arg.AssetType, + arg.CoingeckoID, + arg.ID, + ) + var i Asset + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Name, + &i.Symbol, + &i.Decimals, + &i.ChainID, + &i.Channel, + &i.AssetType, + &i.CoingeckoID, + ) + return i, err +} + +const updateBlockchain = `-- name: UpdateBlockchain :one +UPDATE blockchains +SET + chain_name = ?, + chain_id_cosmos = ?, + chain_id_evm = ?, + api_name = ?, + bech_account_prefix = ?, + bech_validator_prefix = ?, + main_asset_symbol = ?, + main_asset_denom = ?, + staking_asset_symbol = ?, + staking_asset_denom = ?, + is_stake_enabled = ?, + chain_image = ?, + main_asset_image = ?, + staking_asset_image = ?, + chain_type = ?, + is_support_mobile_wallet = ?, + is_support_extension_wallet = ?, + is_support_erc20 = ?, + description_en = ?, + description_ko = ?, + description_ja = ?, + origin_genesis_time = ?, + account_type = ?, + btc_staking = ?, + cosmos_fee_info = ?, + evm_fee_info = ?, + lcd_endpoint = ?, + grpc_endpoint = ?, + evm_rpc_endpoint = ?, + explorer = ?, + about = ?, + forum = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type UpdateBlockchainParams struct { + ChainName string `json:"chain_name"` + ChainIDCosmos sql.NullString `json:"chain_id_cosmos"` + ChainIDEvm sql.NullString `json:"chain_id_evm"` + ApiName sql.NullString `json:"api_name"` + BechAccountPrefix sql.NullString `json:"bech_account_prefix"` + BechValidatorPrefix sql.NullString `json:"bech_validator_prefix"` + MainAssetSymbol sql.NullString `json:"main_asset_symbol"` + MainAssetDenom sql.NullString `json:"main_asset_denom"` + StakingAssetSymbol sql.NullString `json:"staking_asset_symbol"` + StakingAssetDenom sql.NullString `json:"staking_asset_denom"` + IsStakeEnabled bool `json:"is_stake_enabled"` + ChainImage sql.NullString `json:"chain_image"` + MainAssetImage sql.NullString `json:"main_asset_image"` + StakingAssetImage sql.NullString `json:"staking_asset_image"` + ChainType string `json:"chain_type"` + IsSupportMobileWallet bool `json:"is_support_mobile_wallet"` + IsSupportExtensionWallet bool `json:"is_support_extension_wallet"` + IsSupportErc20 bool `json:"is_support_erc20"` + DescriptionEn sql.NullString `json:"description_en"` + DescriptionKo sql.NullString `json:"description_ko"` + DescriptionJa sql.NullString `json:"description_ja"` + OriginGenesisTime sql.NullTime 
`json:"origin_genesis_time"` + AccountType string `json:"account_type"` + BtcStaking sql.NullString `json:"btc_staking"` + CosmosFeeInfo sql.NullString `json:"cosmos_fee_info"` + EvmFeeInfo sql.NullString `json:"evm_fee_info"` + LcdEndpoint sql.NullString `json:"lcd_endpoint"` + GrpcEndpoint sql.NullString `json:"grpc_endpoint"` + EvmRpcEndpoint sql.NullString `json:"evm_rpc_endpoint"` + Explorer sql.NullString `json:"explorer"` + About sql.NullString `json:"about"` + Forum sql.NullString `json:"forum"` + ID string `json:"id"` +} + +func (q *Queries) UpdateBlockchain(ctx context.Context, arg UpdateBlockchainParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, updateBlockchain, + arg.ChainName, + arg.ChainIDCosmos, + arg.ChainIDEvm, + arg.ApiName, + arg.BechAccountPrefix, + arg.BechValidatorPrefix, + arg.MainAssetSymbol, + arg.MainAssetDenom, + arg.StakingAssetSymbol, + arg.StakingAssetDenom, + arg.IsStakeEnabled, + arg.ChainImage, + arg.MainAssetImage, + arg.StakingAssetImage, + arg.ChainType, + arg.IsSupportMobileWallet, + arg.IsSupportExtensionWallet, + arg.IsSupportErc20, + arg.DescriptionEn, + arg.DescriptionKo, + arg.DescriptionJa, + arg.OriginGenesisTime, + arg.AccountType, + arg.BtcStaking, + arg.CosmosFeeInfo, + arg.EvmFeeInfo, + arg.LcdEndpoint, + arg.GrpcEndpoint, + arg.EvmRpcEndpoint, + arg.Explorer, + arg.About, + arg.Forum, + arg.ID, + ) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const updateBlockchainDescriptions = `-- name: UpdateBlockchainDescriptions :one +UPDATE blockchains +SET + description_en = ?, + description_ko = ?, + description_ja = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type UpdateBlockchainDescriptionsParams struct { + DescriptionEn sql.NullString `json:"description_en"` + DescriptionKo sql.NullString `json:"description_ko"` + DescriptionJa sql.NullString `json:"description_ja"` + ID string `json:"id"` +} + +func (q *Queries) UpdateBlockchainDescriptions(ctx context.Context, arg UpdateBlockchainDescriptionsParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, updateBlockchainDescriptions, + arg.DescriptionEn, + arg.DescriptionKo, + arg.DescriptionJa, + arg.ID, + ) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const updateBlockchainEndpoints = `-- name: UpdateBlockchainEndpoints :one +UPDATE blockchains +SET + lcd_endpoint = ?, + grpc_endpoint = ?, + evm_rpc_endpoint = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type UpdateBlockchainEndpointsParams struct { + LcdEndpoint sql.NullString `json:"lcd_endpoint"` + GrpcEndpoint sql.NullString `json:"grpc_endpoint"` + EvmRpcEndpoint sql.NullString `json:"evm_rpc_endpoint"` + ID string `json:"id"` +} + +func (q *Queries) UpdateBlockchainEndpoints(ctx context.Context, arg UpdateBlockchainEndpointsParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, updateBlockchainEndpoints, + arg.LcdEndpoint, + arg.GrpcEndpoint, + arg.EvmRpcEndpoint, + arg.ID, + ) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const updateBlockchainExplorer = `-- name: UpdateBlockchainExplorer :one +UPDATE blockchains +SET + explorer = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type UpdateBlockchainExplorerParams struct { + Explorer sql.NullString `json:"explorer"` + ID string `json:"id"` +} + +func (q *Queries) UpdateBlockchainExplorer(ctx context.Context, arg UpdateBlockchainExplorerParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, updateBlockchainExplorer, arg.Explorer, arg.ID) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const updateBlockchainFeeInfo = `-- name: UpdateBlockchainFeeInfo :one +UPDATE blockchains +SET + cosmos_fee_info = ?, + evm_fee_info = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type UpdateBlockchainFeeInfoParams struct { + CosmosFeeInfo sql.NullString `json:"cosmos_fee_info"` + EvmFeeInfo sql.NullString `json:"evm_fee_info"` + ID string `json:"id"` +} + +func (q *Queries) UpdateBlockchainFeeInfo(ctx context.Context, arg UpdateBlockchainFeeInfoParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, updateBlockchainFeeInfo, arg.CosmosFeeInfo, arg.EvmFeeInfo, arg.ID) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const updateBlockchainImages = `-- name: UpdateBlockchainImages :one +UPDATE blockchains +SET + chain_image = ?, + main_asset_image = ?, + staking_asset_image = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type UpdateBlockchainImagesParams struct { + ChainImage sql.NullString `json:"chain_image"` + MainAssetImage sql.NullString `json:"main_asset_image"` + StakingAssetImage sql.NullString `json:"staking_asset_image"` + ID string `json:"id"` +} + +func (q *Queries) UpdateBlockchainImages(ctx context.Context, arg UpdateBlockchainImagesParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, updateBlockchainImages, + arg.ChainImage, + arg.MainAssetImage, + arg.StakingAssetImage, + arg.ID, + ) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const updateBlockchainSocialLinks = `-- name: UpdateBlockchainSocialLinks :one +UPDATE blockchains +SET + about = ?, + forum = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, chain_name, chain_id_cosmos, chain_id_evm, api_name, bech_account_prefix, bech_validator_prefix, main_asset_symbol, main_asset_denom, staking_asset_symbol, staking_asset_denom, is_stake_enabled, chain_image, main_asset_image, staking_asset_image, chain_type, is_support_mobile_wallet, is_support_extension_wallet, is_support_erc20, description_en, description_ko, description_ja, origin_genesis_time, account_type, btc_staking, cosmos_fee_info, evm_fee_info, lcd_endpoint, grpc_endpoint, evm_rpc_endpoint, explorer, about, forum +` + +type UpdateBlockchainSocialLinksParams struct { + About sql.NullString `json:"about"` + Forum sql.NullString `json:"forum"` + ID string `json:"id"` +} + +func (q *Queries) UpdateBlockchainSocialLinks(ctx context.Context, arg UpdateBlockchainSocialLinksParams) (Blockchain, error) { + row := q.db.QueryRowContext(ctx, updateBlockchainSocialLinks, arg.About, arg.Forum, arg.ID) + var i Blockchain + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.ChainName, + &i.ChainIDCosmos, + &i.ChainIDEvm, + &i.ApiName, + &i.BechAccountPrefix, + &i.BechValidatorPrefix, + &i.MainAssetSymbol, + &i.MainAssetDenom, + &i.StakingAssetSymbol, + &i.StakingAssetDenom, + &i.IsStakeEnabled, + &i.ChainImage, + &i.MainAssetImage, + &i.StakingAssetImage, + &i.ChainType, + &i.IsSupportMobileWallet, + &i.IsSupportExtensionWallet, + &i.IsSupportErc20, + &i.DescriptionEn, + &i.DescriptionKo, + &i.DescriptionJa, + &i.OriginGenesisTime, + &i.AccountType, + &i.BtcStaking, + &i.CosmosFeeInfo, + &i.EvmFeeInfo, + &i.LcdEndpoint, + &i.GrpcEndpoint, + &i.EvmRpcEndpoint, + &i.Explorer, + &i.About, + &i.Forum, + ) + return i, err +} + +const updatePrice = `-- name: UpdatePrice :one +UPDATE prices +SET + price_usd = ?, + price_btc = ?, + volume_24h_usd = ?, + market_cap_usd = ?, + available_supply = ?, + total_supply = ?, + max_supply = ?, + percent_change_1h = ?, + percent_change_24h = ?, + percent_change_7d = ?, + rank = ?, + last_updated = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, asset_id, price_usd, price_btc, volume_24h_usd, market_cap_usd, available_supply, total_supply, max_supply, percent_change_1h, percent_change_24h, percent_change_7d, rank, last_updated +` + +type UpdatePriceParams struct { + PriceUsd sql.NullFloat64 `json:"price_usd"` + PriceBtc sql.NullFloat64 `json:"price_btc"` + Volume24hUsd sql.NullFloat64 `json:"volume_24h_usd"` + MarketCapUsd sql.NullFloat64 `json:"market_cap_usd"` + AvailableSupply sql.NullFloat64 `json:"available_supply"` + TotalSupply sql.NullFloat64 `json:"total_supply"` + MaxSupply sql.NullFloat64 `json:"max_supply"` + PercentChange1h sql.NullFloat64 `json:"percent_change_1h"` + PercentChange24h sql.NullFloat64 `json:"percent_change_24h"` + PercentChange7d sql.NullFloat64 `json:"percent_change_7d"` + Rank sql.NullInt64 `json:"rank"` + LastUpdated time.Time `json:"last_updated"` + ID string `json:"id"` +} + +func (q *Queries) UpdatePrice(ctx context.Context, arg UpdatePriceParams) (Price, error) { + row := q.db.QueryRowContext(ctx, updatePrice, + arg.PriceUsd, + arg.PriceBtc, + arg.Volume24hUsd, + arg.MarketCapUsd, + arg.AvailableSupply, + arg.TotalSupply, + arg.MaxSupply, + arg.PercentChange1h, + arg.PercentChange24h, + arg.PercentChange7d, + arg.Rank, + arg.LastUpdated, + arg.ID, + ) + var i Price + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.AssetID, + &i.PriceUsd, + &i.PriceBtc, + &i.Volume24hUsd, + &i.MarketCapUsd, + &i.AvailableSupply, + &i.TotalSupply, + &i.MaxSupply, + &i.PercentChange1h, + &i.PercentChange24h, + &i.PercentChange7d, + &i.Rank, + &i.LastUpdated, + ) + return i, err +} + +const updatePriceConversion = `-- name: UpdatePriceConversion :one +UPDATE price_conversions +SET + price = ?, + volume_24h = ?, + market_cap = ?, + last_updated = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, price_id, currency_code, price, volume_24h, market_cap, last_updated +` + +type UpdatePriceConversionParams struct { + Price sql.NullFloat64 `json:"price"` + Volume24h sql.NullFloat64 `json:"volume_24h"` + MarketCap sql.NullFloat64 `json:"market_cap"` + LastUpdated time.Time `json:"last_updated"` + ID string `json:"id"` +} + +func (q *Queries) UpdatePriceConversion(ctx context.Context, arg UpdatePriceConversionParams) (PriceConversion, error) { + row := q.db.QueryRowContext(ctx, updatePriceConversion, + arg.Price, + arg.Volume24h, + arg.MarketCap, + arg.LastUpdated, + arg.ID, + ) + var i PriceConversion + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.PriceID, + &i.CurrencyCode, + &i.Price, + &i.Volume24h, + &i.MarketCap, + &i.LastUpdated, + ) + return i, err +} diff --git a/internal/db/network/schema.sql b/internal/db/network/schema.sql new file mode 100644 index 0000000..297f045 --- /dev/null +++ b/internal/db/network/schema.sql @@ -0,0 +1,139 @@ +-- Assets represent tokens and coins +CREATE TABLE assets ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + name TEXT NOT NULL, + symbol TEXT NOT NULL, + decimals INTEGER NOT NULL CHECK(decimals >= 0), + chain_id TEXT NOT NULL, + channel TEXT NOT NULL, + asset_type TEXT NOT NULL, + coingecko_id TEXT, + UNIQUE(chain_id, symbol) +); + +-- Prices entity based on the Alternative.me API for crypto prices +CREATE TABLE prices ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + asset_id TEXT NOT NULL, + price_usd REAL, + price_btc REAL, + volume_24h_usd REAL, + market_cap_usd REAL, + available_supply REAL, + total_supply REAL, + max_supply REAL, + percent_change_1h REAL, + percent_change_24h REAL, + percent_change_7d REAL, + rank INTEGER, + last_updated TIMESTAMP NOT NULL, + FOREIGN KEY (asset_id) REFERENCES assets(id) +); + +-- Currency conversion rates for crypto prices +CREATE TABLE price_conversions ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + price_id TEXT NOT NULL, + currency_code TEXT NOT NULL, + price REAL, + volume_24h REAL, + market_cap REAL, + last_updated TIMESTAMP NOT NULL, + FOREIGN KEY (price_id) REFERENCES prices(id), + UNIQUE(price_id, currency_code) +); + +-- Blockchains table to store chain configuration parameters +CREATE TABLE blockchains ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + + -- Basic chain information + chain_name TEXT NOT NULL, + chain_id_cosmos TEXT, + chain_id_evm TEXT, + api_name TEXT, + bech_account_prefix TEXT, + bech_validator_prefix TEXT, + + -- Chain assets + main_asset_symbol TEXT, + main_asset_denom TEXT, + staking_asset_symbol TEXT, + staking_asset_denom TEXT, + is_stake_enabled BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_stake_enabled IN (0,1)), + + -- Chain images + chain_image TEXT, + main_asset_image TEXT, + staking_asset_image TEXT, + + -- Chain types and features + chain_type TEXT NOT NULL CHECK(json_valid(chain_type)), + is_support_mobile_wallet BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_mobile_wallet IN (0,1)), + 
is_support_extension_wallet BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_extension_wallet IN (0,1)), + is_support_erc20 BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_erc20 IN (0,1)), + + -- Descriptions in multiple languages + description_en TEXT, + description_ko TEXT, + description_ja TEXT, + + -- Genesis information + origin_genesis_time TIMESTAMP, + + -- Account types configuration + account_type TEXT NOT NULL CHECK(json_valid(account_type)), + + -- BTC staking specific + btc_staking TEXT CHECK(json_valid(btc_staking)), + + -- Cosmos fee information + cosmos_fee_info TEXT CHECK(json_valid(cosmos_fee_info)), + + -- EVM fee information + evm_fee_info TEXT CHECK(json_valid(evm_fee_info)), + + -- Endpoints + lcd_endpoint TEXT CHECK(json_valid(lcd_endpoint)), + grpc_endpoint TEXT CHECK(json_valid(grpc_endpoint)), + evm_rpc_endpoint TEXT CHECK(json_valid(evm_rpc_endpoint)), + + -- Explorer information + explorer TEXT CHECK(json_valid(explorer)), + + -- Social and documentation links + about TEXT CHECK(json_valid(about)), + forum TEXT CHECK(json_valid(forum)) +); + +-- Add all necessary indexes +CREATE INDEX idx_assets_symbol ON assets(symbol); +CREATE INDEX idx_assets_chain_id ON assets(chain_id); +CREATE INDEX idx_assets_deleted_at ON assets(deleted_at); + +CREATE INDEX idx_prices_asset_id ON prices(asset_id); +CREATE INDEX idx_prices_rank ON prices(rank); +CREATE INDEX idx_prices_last_updated ON prices(last_updated); +CREATE INDEX idx_prices_deleted_at ON prices(deleted_at); + +CREATE INDEX idx_price_conversions_price_id ON price_conversions(price_id); +CREATE INDEX idx_price_conversions_currency_code ON price_conversions(currency_code); +CREATE INDEX idx_price_conversions_deleted_at ON price_conversions(deleted_at); + +CREATE INDEX idx_blockchains_chain_name ON blockchains(chain_name); +CREATE INDEX idx_blockchains_chain_id_cosmos ON blockchains(chain_id_cosmos); +CREATE INDEX idx_blockchains_chain_id_evm ON blockchains(chain_id_evm); +CREATE INDEX idx_blockchains_main_asset_symbol ON blockchains(main_asset_symbol); +CREATE INDEX idx_blockchains_deleted_at ON blockchains(deleted_at); diff --git a/internal/db/sqlc.yaml b/internal/db/sqlc.yaml new file mode 100644 index 0000000..4adec8d --- /dev/null +++ b/internal/db/sqlc.yaml @@ -0,0 +1,34 @@ +version: "2" +sql: + # Activity DB - User to User Interactions + - engine: "sqlite" + queries: "./activity/query.sql" + schema: "./activity/schema.sql" + gen: + go: + emit_interface: true + emit_json_tags: true + package: "activity" + out: "./activity" + + # Network DB - Blockchain Parameters and Asset Metadata + - engine: "sqlite" + queries: "./network/query.sql" + schema: "./network/schema.sql" + gen: + go: + emit_interface: true + emit_json_tags: true + package: "network" + out: "./network" + + # Users DB - Accounts, Profiles, and Vault Metadata + - engine: "sqlite" + queries: "./users/query.sql" + schema: "./users/schema.sql" + gen: + go: + emit_interface: true + emit_json_tags: true + package: "users" + out: "./users" diff --git a/internal/db/users/db.go b/internal/db/users/db.go new file mode 100644 index 0000000..9615903 --- /dev/null +++ b/internal/db/users/db.go @@ -0,0 +1,31 @@ +// Code generated by sqlc. DO NOT EDIT. 
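The sqlc.yaml above generates three independent packages (activity, network, users), one per SQLite database. A minimal wiring sketch follows; the driver, module path, and NewStores helper are illustrative assumptions, not part of this patch:

package db

import (
	"database/sql"

	_ "modernc.org/sqlite" // assumed driver; the patch does not pin one

	// hypothetical module path; adjust to the real module name
	"github.com/sonr-io/motr/internal/db/activity"
	"github.com/sonr-io/motr/internal/db/network"
	"github.com/sonr-io/motr/internal/db/users"
)

// Stores bundles the three sqlc-generated query sets.
type Stores struct {
	Activity *activity.Queries
	Network  *network.Queries
	Users    *users.Queries
}

// NewStores opens one SQLite file per schema and wraps each handle in its Queries type.
func NewStores(activityPath, networkPath, usersPath string) (*Stores, error) {
	open := func(path string) (*sql.DB, error) { return sql.Open("sqlite", path) }

	adb, err := open(activityPath)
	if err != nil {
		return nil, err
	}
	ndb, err := open(networkPath)
	if err != nil {
		return nil, err
	}
	udb, err := open(usersPath)
	if err != nil {
		return nil, err
	}
	return &Stores{
		Activity: activity.New(adb),
		Network:  network.New(ndb),
		Users:    users.New(udb),
	}, nil
}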
+// versions: +// sqlc v1.28.0 + +package users + +import ( + "context" + "database/sql" +) + +type DBTX interface { + ExecContext(context.Context, string, ...interface{}) (sql.Result, error) + PrepareContext(context.Context, string) (*sql.Stmt, error) + QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) + QueryRowContext(context.Context, string, ...interface{}) *sql.Row +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx *sql.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/internal/db/users/models.go b/internal/db/users/models.go new file mode 100644 index 0000000..0468a2e --- /dev/null +++ b/internal/db/users/models.go @@ -0,0 +1,68 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 + +package users + +import ( + "database/sql" + "time" +) + +type Account struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Number int64 `json:"number"` + Sequence int64 `json:"sequence"` + Address string `json:"address"` + PublicKey string `json:"public_key"` + ChainID string `json:"chain_id"` + BlockCreated int64 `json:"block_created"` + Controller string `json:"controller"` + Label string `json:"label"` + Handle string `json:"handle"` + IsSubsidiary bool `json:"is_subsidiary"` + IsValidator bool `json:"is_validator"` + IsDelegator bool `json:"is_delegator"` + IsAccountable bool `json:"is_accountable"` +} + +type Credential struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Handle string `json:"handle"` + CredentialID string `json:"credential_id"` + AuthenticatorAttachment string `json:"authenticator_attachment"` + Origin string `json:"origin"` + Type string `json:"type"` + Transports string `json:"transports"` +} + +type Profile struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Address string `json:"address"` + Handle string `json:"handle"` + Origin string `json:"origin"` + Name string `json:"name"` +} + +type Vault struct { + ID string `json:"id"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt sql.NullTime `json:"deleted_at"` + Handle string `json:"handle"` + Origin string `json:"origin"` + Address string `json:"address"` + Cid string `json:"cid"` + Config string `json:"config"` + SessionID string `json:"session_id"` + RedirectUri string `json:"redirect_uri"` +} diff --git a/internal/db/users/querier.go b/internal/db/users/querier.go new file mode 100644 index 0000000..49368b8 --- /dev/null +++ b/internal/db/users/querier.go @@ -0,0 +1,53 @@ +// Code generated by sqlc. DO NOT EDIT. 
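Because each package is generated with emit_interface: true, sqlc also emits a Querier interface (the users one follows). Handlers can depend on that interface instead of the concrete *Queries, which keeps them easy to fake in tests. A small sketch; the service name, method, and module path are assumptions:

package handlers

import (
	"context"

	"github.com/sonr-io/motr/internal/db/users" // hypothetical module path
)

// ProfileChecker depends only on the generated users.Querier interface.
type ProfileChecker struct {
	store users.Querier
}

// HandleTaken reports whether a handle is already registered and not soft-deleted.
func (p *ProfileChecker) HandleTaken(ctx context.Context, handle string) (bool, error) {
	return p.store.CheckHandleExists(ctx, handle)
}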
+// versions: +// sqlc v1.28.0 + +package users + +import ( + "context" +) + +type Querier interface { + CheckHandleExists(ctx context.Context, handle string) (bool, error) + GetAccountByAddress(ctx context.Context, address string) (Account, error) + GetAccountByController(ctx context.Context, controller string) (Account, error) + GetAccountByID(ctx context.Context, id string) (Account, error) + GetAccountByNumber(ctx context.Context, number int64) (Account, error) + GetAccountByPublicKey(ctx context.Context, publicKey string) (Account, error) + GetAccountBySequence(ctx context.Context, sequence int64) (Account, error) + GetAccountsByChainID(ctx context.Context, chainID string) ([]Account, error) + GetAccountsByController(ctx context.Context, controller string) ([]Account, error) + GetAccountsByHandle(ctx context.Context, handle string) ([]Account, error) + GetAccountsByLabel(ctx context.Context, label string) ([]Account, error) + GetCredentialByID(ctx context.Context, credentialID string) (Credential, error) + GetCredentialsByHandle(ctx context.Context, handle string) ([]Credential, error) + GetProfileByAddress(ctx context.Context, address string) (Profile, error) + GetProfileByHandle(ctx context.Context, handle string) (Profile, error) + GetProfileByID(ctx context.Context, id string) (Profile, error) + GetVaultByID(ctx context.Context, id string) (Vault, error) + GetVaultConfigByCID(ctx context.Context, cid string) (Vault, error) + GetVaultRedirectURIBySessionID(ctx context.Context, sessionID string) (string, error) + GetVaultsByHandle(ctx context.Context, handle string) ([]Vault, error) + // ACCOUNT QUERIES + InsertAccount(ctx context.Context, arg InsertAccountParams) (Account, error) + // CREDENTIAL QUERIES + InsertCredential(ctx context.Context, arg InsertCredentialParams) (Credential, error) + // PROFILE QUERIES + InsertProfile(ctx context.Context, arg InsertProfileParams) (Profile, error) + // VAULT QUERIES + InsertVault(ctx context.Context, arg InsertVaultParams) (Vault, error) + ListDelegatorAccounts(ctx context.Context) ([]Account, error) + ListProfiles(ctx context.Context, arg ListProfilesParams) ([]Profile, error) + ListValidatorAccounts(ctx context.Context) ([]Account, error) + SoftDeleteAccount(ctx context.Context, id string) error + SoftDeleteCredential(ctx context.Context, credentialID string) error + SoftDeleteProfile(ctx context.Context, address string) error + SoftDeleteVault(ctx context.Context, id string) error + UpdateAccountLabel(ctx context.Context, arg UpdateAccountLabelParams) (Account, error) + UpdateAccountSequence(ctx context.Context, arg UpdateAccountSequenceParams) (Account, error) + UpdateProfile(ctx context.Context, arg UpdateProfileParams) (Profile, error) + UpdateVault(ctx context.Context, arg UpdateVaultParams) (Vault, error) +} + +var _ Querier = (*Queries)(nil) diff --git a/internal/db/users/query.sql b/internal/db/users/query.sql new file mode 100644 index 0000000..73b50d1 --- /dev/null +++ b/internal/db/users/query.sql @@ -0,0 +1,234 @@ +-- PROFILE QUERIES +-- name: InsertProfile :one +INSERT INTO profiles ( + address, + handle, + origin, + name +) VALUES (?, ?, ?, ?) +RETURNING *; + +-- name: GetProfileByID :one +SELECT * FROM profiles +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetProfileByAddress :one +SELECT * FROM profiles +WHERE address = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetProfileByHandle :one +SELECT * FROM profiles +WHERE handle = ? 
+AND deleted_at IS NULL +LIMIT 1; + +-- name: CheckHandleExists :one +SELECT COUNT(*) > 0 as handle_exists FROM profiles +WHERE handle = ? +AND deleted_at IS NULL; + +-- name: UpdateProfile :one +UPDATE profiles +SET + name = ?, + handle = ?, + updated_at = CURRENT_TIMESTAMP +WHERE address = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteProfile :exec +UPDATE profiles +SET deleted_at = CURRENT_TIMESTAMP +WHERE address = ?; + +-- name: ListProfiles :many +SELECT * FROM profiles +WHERE deleted_at IS NULL +ORDER BY created_at DESC +LIMIT ? OFFSET ?; + +-- VAULT QUERIES +-- name: InsertVault :one +INSERT INTO vaults ( + handle, + origin, + address, + cid, + config, + session_id, + redirect_uri +) VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetVaultByID :one +SELECT * FROM vaults +WHERE id = ? +AND deleted_at IS NULL +LIMIT 1; + +-- name: GetVaultsByHandle :many +SELECT * FROM vaults +WHERE handle = ? +AND deleted_at IS NULL +ORDER BY created_at DESC; + +-- name: GetVaultConfigByCID :one +SELECT * FROM vaults +WHERE cid = ? +AND deleted_at IS NULL +LIMIT 1; + +-- name: GetVaultRedirectURIBySessionID :one +SELECT redirect_uri FROM vaults +WHERE session_id = ? +AND deleted_at IS NULL +LIMIT 1; + +-- name: UpdateVault :one +UPDATE vaults +SET + config = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteVault :exec +UPDATE vaults +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- ACCOUNT QUERIES +-- name: InsertAccount :one +INSERT INTO accounts ( + number, + sequence, + address, + public_key, + chain_id, + block_created, + controller, + label, + is_subsidiary, + is_validator, + is_delegator, + is_accountable +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetAccountByID :one +SELECT * FROM accounts +WHERE id = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAccountByAddress :one +SELECT * FROM accounts +WHERE address = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAccountsByHandle :many +SELECT * FROM accounts +WHERE handle = ? AND deleted_at IS NULL +ORDER BY created_at DESC; + +-- name: GetAccountByController :one +SELECT * FROM accounts +WHERE controller = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAccountByPublicKey :one +SELECT * FROM accounts +WHERE public_key = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAccountByNumber :one +SELECT * FROM accounts +WHERE number = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAccountBySequence :one +SELECT * FROM accounts +WHERE sequence = ? AND deleted_at IS NULL +LIMIT 1; + +-- name: GetAccountsByChainID :many +SELECT * FROM accounts +WHERE chain_id = ? AND deleted_at IS NULL +ORDER BY sequence DESC; + +-- name: GetAccountsByController :many +SELECT * FROM accounts +WHERE controller = ? AND deleted_at IS NULL +ORDER BY created_at DESC; + +-- name: GetAccountsByLabel :many +SELECT * FROM accounts +WHERE label = ? AND deleted_at IS NULL +ORDER BY created_at DESC; + +-- name: UpdateAccountSequence :one +UPDATE accounts +SET + sequence = ?, + updated_at = CURRENT_TIMESTAMP +WHERE address = ? +AND deleted_at IS NULL +RETURNING *; + +-- name: UpdateAccountLabel :one +UPDATE accounts +SET + label = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING *; + +-- name: SoftDeleteAccount :exec +UPDATE accounts +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ?; + +-- name: ListValidatorAccounts :many +SELECT * FROM accounts +WHERE is_validator = 1 +AND deleted_at IS NULL +ORDER BY created_at DESC; + +-- name: ListDelegatorAccounts :many +SELECT * FROM accounts +WHERE is_delegator = 1 +AND deleted_at IS NULL +ORDER BY created_at DESC; + +-- CREDENTIAL QUERIES +-- name: InsertCredential :one +INSERT INTO credentials ( + handle, + credential_id, + authenticator_attachment, + origin, + type, + transports +) VALUES (?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetCredentialsByHandle :many +SELECT * FROM credentials +WHERE handle = ? +AND deleted_at IS NULL; + +-- name: GetCredentialByID :one +SELECT * FROM credentials +WHERE credential_id = ? +AND deleted_at IS NULL +LIMIT 1; + +-- name: SoftDeleteCredential :exec +UPDATE credentials +SET deleted_at = CURRENT_TIMESTAMP +WHERE credential_id = ?; + diff --git a/internal/db/users/query.sql.go b/internal/db/users/query.sql.go new file mode 100644 index 0000000..6a0879d --- /dev/null +++ b/internal/db/users/query.sql.go @@ -0,0 +1,1177 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 +// source: query.sql + +package users + +import ( + "context" +) + +const checkHandleExists = `-- name: CheckHandleExists :one +SELECT COUNT(*) > 0 as handle_exists FROM profiles +WHERE handle = ? +AND deleted_at IS NULL +` + +func (q *Queries) CheckHandleExists(ctx context.Context, handle string) (bool, error) { + row := q.db.QueryRowContext(ctx, checkHandleExists, handle) + var handle_exists bool + err := row.Scan(&handle_exists) + return handle_exists, err +} + +const getAccountByAddress = `-- name: GetAccountByAddress :one +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE address = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAccountByAddress(ctx context.Context, address string) (Account, error) { + row := q.db.QueryRowContext(ctx, getAccountByAddress, address) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const getAccountByController = `-- name: GetAccountByController :one +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE controller = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAccountByController(ctx context.Context, controller string) (Account, error) { + row := q.db.QueryRowContext(ctx, getAccountByController, controller) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const getAccountByID = `-- name: GetAccountByID :one +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAccountByID(ctx context.Context, id string) (Account, error) { + row := q.db.QueryRowContext(ctx, getAccountByID, id) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const getAccountByNumber = `-- name: GetAccountByNumber :one +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE number = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAccountByNumber(ctx context.Context, number int64) (Account, error) { + row := q.db.QueryRowContext(ctx, getAccountByNumber, number) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const getAccountByPublicKey = `-- name: GetAccountByPublicKey :one +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE public_key = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAccountByPublicKey(ctx context.Context, publicKey string) (Account, error) { + row := q.db.QueryRowContext(ctx, getAccountByPublicKey, publicKey) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const getAccountBySequence = `-- name: GetAccountBySequence :one +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE sequence = ? 
AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetAccountBySequence(ctx context.Context, sequence int64) (Account, error) { + row := q.db.QueryRowContext(ctx, getAccountBySequence, sequence) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const getAccountsByChainID = `-- name: GetAccountsByChainID :many +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE chain_id = ? AND deleted_at IS NULL +ORDER BY sequence DESC +` + +func (q *Queries) GetAccountsByChainID(ctx context.Context, chainID string) ([]Account, error) { + rows, err := q.db.QueryContext(ctx, getAccountsByChainID, chainID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Account + for rows.Next() { + var i Account + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getAccountsByController = `-- name: GetAccountsByController :many +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE controller = ? AND deleted_at IS NULL +ORDER BY created_at DESC +` + +func (q *Queries) GetAccountsByController(ctx context.Context, controller string) ([]Account, error) { + rows, err := q.db.QueryContext(ctx, getAccountsByController, controller) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Account + for rows.Next() { + var i Account + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getAccountsByHandle = `-- name: GetAccountsByHandle :many +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE handle = ? 
AND deleted_at IS NULL +ORDER BY created_at DESC +` + +func (q *Queries) GetAccountsByHandle(ctx context.Context, handle string) ([]Account, error) { + rows, err := q.db.QueryContext(ctx, getAccountsByHandle, handle) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Account + for rows.Next() { + var i Account + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getAccountsByLabel = `-- name: GetAccountsByLabel :many +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE label = ? AND deleted_at IS NULL +ORDER BY created_at DESC +` + +func (q *Queries) GetAccountsByLabel(ctx context.Context, label string) ([]Account, error) { + rows, err := q.db.QueryContext(ctx, getAccountsByLabel, label) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Account + for rows.Next() { + var i Account + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getCredentialByID = `-- name: GetCredentialByID :one +SELECT id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports FROM credentials +WHERE credential_id = ? +AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetCredentialByID(ctx context.Context, credentialID string) (Credential, error) { + row := q.db.QueryRowContext(ctx, getCredentialByID, credentialID) + var i Credential + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.CredentialID, + &i.AuthenticatorAttachment, + &i.Origin, + &i.Type, + &i.Transports, + ) + return i, err +} + +const getCredentialsByHandle = `-- name: GetCredentialsByHandle :many +SELECT id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports FROM credentials +WHERE handle = ? 
+AND deleted_at IS NULL +` + +func (q *Queries) GetCredentialsByHandle(ctx context.Context, handle string) ([]Credential, error) { + rows, err := q.db.QueryContext(ctx, getCredentialsByHandle, handle) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Credential + for rows.Next() { + var i Credential + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.CredentialID, + &i.AuthenticatorAttachment, + &i.Origin, + &i.Type, + &i.Transports, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getProfileByAddress = `-- name: GetProfileByAddress :one +SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name FROM profiles +WHERE address = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetProfileByAddress(ctx context.Context, address string) (Profile, error) { + row := q.db.QueryRowContext(ctx, getProfileByAddress, address) + var i Profile + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Address, + &i.Handle, + &i.Origin, + &i.Name, + ) + return i, err +} + +const getProfileByHandle = `-- name: GetProfileByHandle :one +SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name FROM profiles +WHERE handle = ? +AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetProfileByHandle(ctx context.Context, handle string) (Profile, error) { + row := q.db.QueryRowContext(ctx, getProfileByHandle, handle) + var i Profile + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Address, + &i.Handle, + &i.Origin, + &i.Name, + ) + return i, err +} + +const getProfileByID = `-- name: GetProfileByID :one +SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name FROM profiles +WHERE id = ? AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetProfileByID(ctx context.Context, id string) (Profile, error) { + row := q.db.QueryRowContext(ctx, getProfileByID, id) + var i Profile + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Address, + &i.Handle, + &i.Origin, + &i.Name, + ) + return i, err +} + +const getVaultByID = `-- name: GetVaultByID :one +SELECT id, created_at, updated_at, deleted_at, handle, origin, address, cid, config, session_id, redirect_uri FROM vaults +WHERE id = ? +AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetVaultByID(ctx context.Context, id string) (Vault, error) { + row := q.db.QueryRowContext(ctx, getVaultByID, id) + var i Vault + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.Origin, + &i.Address, + &i.Cid, + &i.Config, + &i.SessionID, + &i.RedirectUri, + ) + return i, err +} + +const getVaultConfigByCID = `-- name: GetVaultConfigByCID :one +SELECT id, created_at, updated_at, deleted_at, handle, origin, address, cid, config, session_id, redirect_uri FROM vaults +WHERE cid = ? 
+AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetVaultConfigByCID(ctx context.Context, cid string) (Vault, error) { + row := q.db.QueryRowContext(ctx, getVaultConfigByCID, cid) + var i Vault + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.Origin, + &i.Address, + &i.Cid, + &i.Config, + &i.SessionID, + &i.RedirectUri, + ) + return i, err +} + +const getVaultRedirectURIBySessionID = `-- name: GetVaultRedirectURIBySessionID :one +SELECT redirect_uri FROM vaults +WHERE session_id = ? +AND deleted_at IS NULL +LIMIT 1 +` + +func (q *Queries) GetVaultRedirectURIBySessionID(ctx context.Context, sessionID string) (string, error) { + row := q.db.QueryRowContext(ctx, getVaultRedirectURIBySessionID, sessionID) + var redirect_uri string + err := row.Scan(&redirect_uri) + return redirect_uri, err +} + +const getVaultsByHandle = `-- name: GetVaultsByHandle :many +SELECT id, created_at, updated_at, deleted_at, handle, origin, address, cid, config, session_id, redirect_uri FROM vaults +WHERE handle = ? +AND deleted_at IS NULL +ORDER BY created_at DESC +` + +func (q *Queries) GetVaultsByHandle(ctx context.Context, handle string) ([]Vault, error) { + rows, err := q.db.QueryContext(ctx, getVaultsByHandle, handle) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Vault + for rows.Next() { + var i Vault + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.Origin, + &i.Address, + &i.Cid, + &i.Config, + &i.SessionID, + &i.RedirectUri, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const insertAccount = `-- name: InsertAccount :one +INSERT INTO accounts ( + number, + sequence, + address, + public_key, + chain_id, + block_created, + controller, + label, + is_subsidiary, + is_validator, + is_delegator, + is_accountable +) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+RETURNING id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable +` + +type InsertAccountParams struct { + Number int64 `json:"number"` + Sequence int64 `json:"sequence"` + Address string `json:"address"` + PublicKey string `json:"public_key"` + ChainID string `json:"chain_id"` + BlockCreated int64 `json:"block_created"` + Controller string `json:"controller"` + Label string `json:"label"` + IsSubsidiary bool `json:"is_subsidiary"` + IsValidator bool `json:"is_validator"` + IsDelegator bool `json:"is_delegator"` + IsAccountable bool `json:"is_accountable"` +} + +// ACCOUNT QUERIES +func (q *Queries) InsertAccount(ctx context.Context, arg InsertAccountParams) (Account, error) { + row := q.db.QueryRowContext(ctx, insertAccount, + arg.Number, + arg.Sequence, + arg.Address, + arg.PublicKey, + arg.ChainID, + arg.BlockCreated, + arg.Controller, + arg.Label, + arg.IsSubsidiary, + arg.IsValidator, + arg.IsDelegator, + arg.IsAccountable, + ) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const insertCredential = `-- name: InsertCredential :one +INSERT INTO credentials ( + handle, + credential_id, + authenticator_attachment, + origin, + type, + transports +) VALUES (?, ?, ?, ?, ?, ?) +RETURNING id, created_at, updated_at, deleted_at, handle, credential_id, authenticator_attachment, origin, type, transports +` + +type InsertCredentialParams struct { + Handle string `json:"handle"` + CredentialID string `json:"credential_id"` + AuthenticatorAttachment string `json:"authenticator_attachment"` + Origin string `json:"origin"` + Type string `json:"type"` + Transports string `json:"transports"` +} + +// CREDENTIAL QUERIES +func (q *Queries) InsertCredential(ctx context.Context, arg InsertCredentialParams) (Credential, error) { + row := q.db.QueryRowContext(ctx, insertCredential, + arg.Handle, + arg.CredentialID, + arg.AuthenticatorAttachment, + arg.Origin, + arg.Type, + arg.Transports, + ) + var i Credential + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.CredentialID, + &i.AuthenticatorAttachment, + &i.Origin, + &i.Type, + &i.Transports, + ) + return i, err +} + +const insertProfile = `-- name: InsertProfile :one +INSERT INTO profiles ( + address, + handle, + origin, + name +) VALUES (?, ?, ?, ?) +RETURNING id, created_at, updated_at, deleted_at, address, handle, origin, name +` + +type InsertProfileParams struct { + Address string `json:"address"` + Handle string `json:"handle"` + Origin string `json:"origin"` + Name string `json:"name"` +} + +// PROFILE QUERIES +func (q *Queries) InsertProfile(ctx context.Context, arg InsertProfileParams) (Profile, error) { + row := q.db.QueryRowContext(ctx, insertProfile, + arg.Address, + arg.Handle, + arg.Origin, + arg.Name, + ) + var i Profile + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Address, + &i.Handle, + &i.Origin, + &i.Name, + ) + return i, err +} + +const insertVault = `-- name: InsertVault :one +INSERT INTO vaults ( + handle, + origin, + address, + cid, + config, + session_id, + redirect_uri +) VALUES (?, ?, ?, ?, ?, ?, ?) 
+RETURNING id, created_at, updated_at, deleted_at, handle, origin, address, cid, config, session_id, redirect_uri +` + +type InsertVaultParams struct { + Handle string `json:"handle"` + Origin string `json:"origin"` + Address string `json:"address"` + Cid string `json:"cid"` + Config string `json:"config"` + SessionID string `json:"session_id"` + RedirectUri string `json:"redirect_uri"` +} + +// VAULT QUERIES +func (q *Queries) InsertVault(ctx context.Context, arg InsertVaultParams) (Vault, error) { + row := q.db.QueryRowContext(ctx, insertVault, + arg.Handle, + arg.Origin, + arg.Address, + arg.Cid, + arg.Config, + arg.SessionID, + arg.RedirectUri, + ) + var i Vault + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.Origin, + &i.Address, + &i.Cid, + &i.Config, + &i.SessionID, + &i.RedirectUri, + ) + return i, err +} + +const listDelegatorAccounts = `-- name: ListDelegatorAccounts :many +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE is_delegator = 1 +AND deleted_at IS NULL +ORDER BY created_at DESC +` + +func (q *Queries) ListDelegatorAccounts(ctx context.Context) ([]Account, error) { + rows, err := q.db.QueryContext(ctx, listDelegatorAccounts) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Account + for rows.Next() { + var i Account + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listProfiles = `-- name: ListProfiles :many +SELECT id, created_at, updated_at, deleted_at, address, handle, origin, name FROM profiles +WHERE deleted_at IS NULL +ORDER BY created_at DESC +LIMIT ? OFFSET ? 
+` + +type ListProfilesParams struct { + Limit int64 `json:"limit"` + Offset int64 `json:"offset"` +} + +func (q *Queries) ListProfiles(ctx context.Context, arg ListProfilesParams) ([]Profile, error) { + rows, err := q.db.QueryContext(ctx, listProfiles, arg.Limit, arg.Offset) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Profile + for rows.Next() { + var i Profile + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Address, + &i.Handle, + &i.Origin, + &i.Name, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listValidatorAccounts = `-- name: ListValidatorAccounts :many +SELECT id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable FROM accounts +WHERE is_validator = 1 +AND deleted_at IS NULL +ORDER BY created_at DESC +` + +func (q *Queries) ListValidatorAccounts(ctx context.Context) ([]Account, error) { + rows, err := q.db.QueryContext(ctx, listValidatorAccounts) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Account + for rows.Next() { + var i Account + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const softDeleteAccount = `-- name: SoftDeleteAccount :exec +UPDATE accounts +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteAccount(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteAccount, id) + return err +} + +const softDeleteCredential = `-- name: SoftDeleteCredential :exec +UPDATE credentials +SET deleted_at = CURRENT_TIMESTAMP +WHERE credential_id = ? +` + +func (q *Queries) SoftDeleteCredential(ctx context.Context, credentialID string) error { + _, err := q.db.ExecContext(ctx, softDeleteCredential, credentialID) + return err +} + +const softDeleteProfile = `-- name: SoftDeleteProfile :exec +UPDATE profiles +SET deleted_at = CURRENT_TIMESTAMP +WHERE address = ? +` + +func (q *Queries) SoftDeleteProfile(ctx context.Context, address string) error { + _, err := q.db.ExecContext(ctx, softDeleteProfile, address) + return err +} + +const softDeleteVault = `-- name: SoftDeleteVault :exec +UPDATE vaults +SET deleted_at = CURRENT_TIMESTAMP +WHERE id = ? +` + +func (q *Queries) SoftDeleteVault(ctx context.Context, id string) error { + _, err := q.db.ExecContext(ctx, softDeleteVault, id) + return err +} + +const updateAccountLabel = `-- name: UpdateAccountLabel :one +UPDATE accounts +SET + label = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable +` + +type UpdateAccountLabelParams struct { + Label string `json:"label"` + ID string `json:"id"` +} + +func (q *Queries) UpdateAccountLabel(ctx context.Context, arg UpdateAccountLabelParams) (Account, error) { + row := q.db.QueryRowContext(ctx, updateAccountLabel, arg.Label, arg.ID) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const updateAccountSequence = `-- name: UpdateAccountSequence :one +UPDATE accounts +SET + sequence = ?, + updated_at = CURRENT_TIMESTAMP +WHERE address = ? +AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, number, sequence, address, public_key, chain_id, block_created, controller, label, handle, is_subsidiary, is_validator, is_delegator, is_accountable +` + +type UpdateAccountSequenceParams struct { + Sequence int64 `json:"sequence"` + Address string `json:"address"` +} + +func (q *Queries) UpdateAccountSequence(ctx context.Context, arg UpdateAccountSequenceParams) (Account, error) { + row := q.db.QueryRowContext(ctx, updateAccountSequence, arg.Sequence, arg.Address) + var i Account + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Number, + &i.Sequence, + &i.Address, + &i.PublicKey, + &i.ChainID, + &i.BlockCreated, + &i.Controller, + &i.Label, + &i.Handle, + &i.IsSubsidiary, + &i.IsValidator, + &i.IsDelegator, + &i.IsAccountable, + ) + return i, err +} + +const updateProfile = `-- name: UpdateProfile :one +UPDATE profiles +SET + name = ?, + handle = ?, + updated_at = CURRENT_TIMESTAMP +WHERE address = ? +AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, address, handle, origin, name +` + +type UpdateProfileParams struct { + Name string `json:"name"` + Handle string `json:"handle"` + Address string `json:"address"` +} + +func (q *Queries) UpdateProfile(ctx context.Context, arg UpdateProfileParams) (Profile, error) { + row := q.db.QueryRowContext(ctx, updateProfile, arg.Name, arg.Handle, arg.Address) + var i Profile + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Address, + &i.Handle, + &i.Origin, + &i.Name, + ) + return i, err +} + +const updateVault = `-- name: UpdateVault :one +UPDATE vaults +SET + config = ?, + updated_at = CURRENT_TIMESTAMP +WHERE id = ? 
+AND deleted_at IS NULL +RETURNING id, created_at, updated_at, deleted_at, handle, origin, address, cid, config, session_id, redirect_uri +` + +type UpdateVaultParams struct { + Config string `json:"config"` + ID string `json:"id"` +} + +func (q *Queries) UpdateVault(ctx context.Context, arg UpdateVaultParams) (Vault, error) { + row := q.db.QueryRowContext(ctx, updateVault, arg.Config, arg.ID) + var i Vault + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.DeletedAt, + &i.Handle, + &i.Origin, + &i.Address, + &i.Cid, + &i.Config, + &i.SessionID, + &i.RedirectUri, + ) + return i, err +} diff --git a/internal/db/users/schema.sql b/internal/db/users/schema.sql new file mode 100644 index 0000000..b5ba399 --- /dev/null +++ b/internal/db/users/schema.sql @@ -0,0 +1,81 @@ +-- Credentials store WebAuthn credentials +CREATE TABLE credentials ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + handle TEXT NOT NULL, + credential_id TEXT NOT NULL UNIQUE, + authenticator_attachment TEXT NOT NULL, + origin TEXT NOT NULL, + type TEXT NOT NULL, + transports TEXT NOT NULL +); + +-- Accounts represent blockchain accounts +CREATE TABLE accounts ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + number INTEGER NOT NULL, + sequence INTEGER NOT NULL DEFAULT 0, + address TEXT NOT NULL UNIQUE, + public_key TEXT NOT NULL CHECK(json_valid(public_key)), + chain_id TEXT NOT NULL, + block_created INTEGER NOT NULL, + controller TEXT NOT NULL, + label TEXT NOT NULL, + handle TEXT NOT NULL, + is_subsidiary BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_subsidiary IN (0,1)), + is_validator BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_validator IN (0,1)), + is_delegator BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_delegator IN (0,1)), + is_accountable BOOLEAN NOT NULL DEFAULT TRUE CHECK(is_accountable IN (0,1)) +); + +-- Profiles represent user identities +CREATE TABLE profiles ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + address TEXT NOT NULL, + handle TEXT NOT NULL UNIQUE, + origin TEXT NOT NULL, + name TEXT NOT NULL, + UNIQUE(address, origin) +); + +-- Vaults store encrypted data +CREATE TABLE vaults ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + handle TEXT NOT NULL, + origin TEXT NOT NULL, + address TEXT NOT NULL, + cid TEXT NOT NULL UNIQUE, + config TEXT NOT NULL CHECK(json_valid(config)), + session_id TEXT NOT NULL, + redirect_uri TEXT NOT NULL +); + +CREATE INDEX idx_credentials_handle ON credentials(handle); +CREATE INDEX idx_credentials_origin ON credentials(origin); +CREATE INDEX idx_credentials_deleted_at ON credentials(deleted_at); + +CREATE INDEX idx_accounts_address ON accounts(address); +CREATE INDEX idx_accounts_chain_id ON accounts(chain_id); +CREATE INDEX idx_accounts_block_created ON accounts(block_created); +CREATE INDEX idx_accounts_label ON accounts(label); +CREATE INDEX idx_accounts_controller ON accounts(controller); +CREATE INDEX idx_accounts_deleted_at ON accounts(deleted_at); + +CREATE INDEX idx_profiles_handle ON profiles(handle); +CREATE INDEX idx_profiles_address ON profiles(address); +CREATE INDEX idx_profiles_deleted_at 
ON profiles(deleted_at); + +CREATE INDEX idx_vaults_handle ON vaults(handle); +CREATE INDEX idx_vaults_session_id ON vaults(session_id); +CREATE INDEX idx_vaults_deleted_at ON vaults(deleted_at); diff --git a/internal/jobs/cron.go b/internal/jobs/cron.go new file mode 100644 index 0000000..415ac45 --- /dev/null +++ b/internal/jobs/cron.go @@ -0,0 +1 @@ +package jobs diff --git a/internal/jobs/events.go b/internal/jobs/events.go new file mode 100644 index 0000000..f80b145 --- /dev/null +++ b/internal/jobs/events.go @@ -0,0 +1,12 @@ +package jobs + +type EventTrigger string + +var ( + EventTriggerMinute = EventTrigger("0 * * * * *") // Every minute (with seconds) + EventTriggerHourly = EventTrigger("0 */1 * * *") // Every hour at minute 0 + EventTriggerDaily = EventTrigger("0 0 0 * * *") // Every day at 00:00:00 + EventTriggerWeekly = EventTrigger("0 0 0 * * 0") // Every Sunday at 00:00:00 + EventTriggerMonthly = EventTrigger("0 0 0 1 * *") // First day of every month at 00:00:00 + EventTriggerYearly = EventTrigger("0 0 0 1 1 *") // January 1st every year at 00:00:00 +) diff --git a/internal/jobs/tasks.go b/internal/jobs/tasks.go new file mode 100644 index 0000000..415ac45 --- /dev/null +++ b/internal/jobs/tasks.go @@ -0,0 +1 @@ +package jobs diff --git a/internal/meta/metadata.templ b/internal/meta/metadata.templ new file mode 100644 index 0000000..ca35f66 --- /dev/null +++ b/internal/meta/metadata.templ @@ -0,0 +1,97 @@ +package meta + +import "github.com/labstack/echo/v4" + +func GetMetadata(c echo.Context) Metadata { + return DefaultMetadata() +} + +func GetMetaComponent(c echo.Context) templ.Component { + return MetaComponent(GetMetadata(c)) +} + +func DefaultMetadata() Metadata { + return Metadata{ + Title: "Motr", + Author: "Sonr", + Favicon: "https://cdn.sonr.id/favicon.png", + Robots: "index, follow", + Googlebot: "index, follow", + Google: "nositelinkssearchbox", + Description: "Sonr is a decentralized social network that allows you to create your own personalized digital identity.", + Keywords: "Sonr, social network, decentralized, identity, decentralized social network, decentralized identity, self-sovereign identity, self-sovereign, self-sovereign social network, self-sovereign identity network, sso, sso network, sso identity, sso social network, digital identity, digital social network", + CanonicalURL: "https://sonr.io", + OGImage: "https://cdn.sonr.id/og.png", + OGURL: "https://sonr.io", + OGSiteName: "Sonr", + TwitterSite: "@sonr_io", + TwitterCreator: "@sonr_io", + TwitterImage: "https://cdn.sonr.id/og.png", + } +} + +type Metadata struct { + Title string + Author string + Favicon string + Robots string + Googlebot string + Google string + Description string + Keywords string + CanonicalURL string + OGImage string + OGURL string + OGSiteName string + TwitterSite string + TwitterCreator string + TwitterImage string +} + +templ DefaultMetaComponent() { + Motr + + + + + + + + + + + + + + + + + + + + +} + +templ MetaComponent(m Metadata) { + { m.Title } + + + + + + + + + + + + + + + + + + + + +} diff --git a/internal/meta/metadata_templ.go b/internal/meta/metadata_templ.go new file mode 100644 index 0000000..92d0bda --- /dev/null +++ b/internal/meta/metadata_templ.go @@ -0,0 +1,351 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package meta + +//lint:file-ignore SA4006 This context is only used if a nested component is present. 
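
[Editor's note — minimal editorial sketch, not part of the patch. GetMetaComponent above returns a plain templ.Component, so it can be streamed straight into an Echo response via templ's Render(ctx, io.Writer). The module path and route are hypothetical.]

    package main

    import (
    	"github.com/labstack/echo/v4"

    	meta "example.com/motr/internal/meta" // hypothetical module path
    )

    func main() {
    	e := echo.New()
    	e.GET("/head", func(c echo.Context) error {
    		// *echo.Response is an io.Writer, so the component renders directly into it.
    		c.Response().Header().Set(echo.HeaderContentType, echo.MIMETextHTMLCharsetUTF8)
    		return meta.GetMetaComponent(c).Render(c.Request().Context(), c.Response())
    	})
    	e.Logger.Fatal(e.Start(":8080"))
    }
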
+ +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +import "github.com/labstack/echo/v4" + +func GetMetadata(c echo.Context) Metadata { + return DefaultMetadata() +} + +func GetMetaComponent(c echo.Context) templ.Component { + return MetaComponent(GetMetadata(c)) +} + +func DefaultMetadata() Metadata { + return Metadata{ + Title: "Motr", + Author: "Sonr", + Favicon: "https://cdn.sonr.id/favicon.png", + Robots: "index, follow", + Googlebot: "index, follow", + Google: "nositelinkssearchbox", + Description: "Sonr is a decentralized social network that allows you to create your own personalized digital identity.", + Keywords: "Sonr, social network, decentralized, identity, decentralized social network, decentralized identity, self-sovereign identity, self-sovereign, self-sovereign social network, self-sovereign identity network, sso, sso network, sso identity, sso social network, digital identity, digital social network", + CanonicalURL: "https://sonr.io", + OGImage: "https://cdn.sonr.id/og.png", + OGURL: "https://sonr.io", + OGSiteName: "Sonr", + TwitterSite: "@sonr_io", + TwitterCreator: "@sonr_io", + TwitterImage: "https://cdn.sonr.id/og.png", + } +} + +type Metadata struct { + Title string + Author string + Favicon string + Robots string + Googlebot string + Google string + Description string + Keywords string + CanonicalURL string + OGImage string + OGURL string + OGSiteName string + TwitterSite string + TwitterCreator string + TwitterImage string +} + +func DefaultMetaComponent() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "Motr") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +func MetaComponent(m Metadata) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var2 := templ.GetChildren(ctx) + if templ_7745c5c3_Var2 == nil { + templ_7745c5c3_Var2 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, "") + if templ_7745c5c3_Err 
!= nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var3 string + templ_7745c5c3_Var3, templ_7745c5c3_Err = templ.JoinStringErrs(m.Title) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/meta/metadata.templ`, Line: 76, Col: 17} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var3)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 3, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite b/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..0b58f0dca7d6ce95907cb0b322dc4802f04688c0 GIT binary patch literal 4096 zcmWFz^vNtqRY=P(%1ta$FlG>7U}9o$P*7lCU|@t|AVoG{WYDXN;00+HAlr;ljiVtj n8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*O6ovo**AfQj literal 0 HcmV?d00001 diff --git a/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-shm b/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-shm new file mode 100644 index 0000000000000000000000000000000000000000..4b157f3d8cc051cdcd69b0170f0b98f69cba9ba3 GIT binary patch literal 32768 zcmeI5yHZnO6on50f`9@d5D6+b<$jlowLP>)*-$u|E@+y{A`4Kbz-` zN1oSL-DXRFuOH<8<;t%+zWy`ODY`_rcrJQGuXrK)M8EJ^KMshO;*}T_Lt(5KS>D-ii0(gJ^xm&!@z+m=Uw$;d^uX|D*UM=EZ_&z3=by=NHA2 zSQab7w%3Fm6AWMg0~o*n1~7mD3}65Q7{CAqFn|FJU;qOczyJm?fB_6(00S7n00uCC z0SsUO0~o*n1~7mD3}65Q7{CAqFn|FJU;qOczyJm?fB_6(00S7nKzRnfly}Q*x?}em zkcfc}bx4d?b82VUjvQ~}nXhfUc3GEpqX&r?aNkN9`BuhlK7qS*mko&+aMMs3`Ci6t zK7qS*>k^3=aK}~|aU&Ut7;t|Xi5PIZ9LZ(CZF?kQz`cqjmw~BV<22sxP9g?w02Wd;O~rIFjIVk=hCC~>NcK^4DY4akeh<*~z|;PrE(e_g1^n$o@;G`ucwwNhF?%^cDNR-v3nJy52AKW_n)l zxzw|fBw_;r5C8!X009sH0T2Lz$i)ON?ApaG9x~OlB~8iCC=+rneKeg*`}ccBvT`~n zi@EfXaar`|h^d6=Z0EEEaV(RQkI7l_M0RWf>zQ@t)QqTOG-GM7|R@$Pl;^`i<23VRY-YlNDp+y zg@Z}1(JaU?RkLBRI^FGs(1HO0G)X$o^Fo>oV_kFW)e9A^uBe4VT{B!EUG0Sg$_Hcx za-=iyxNv8ZTWFRTlEt@|<&+Faand9;78mYHat}4PpD(I=Ef%Esa$&EgqWGkiJ9r~aq*};nw}odi6hh5tenXyZV{^cjauQoSN3$+ zlT4enhi`Y6b%drDG$UWvYo=bU*rTDT3_BfQum)3v7L5h8u2tQ#W~yX(D$N_YPaZj* zdeEp=6mqufg%rz{B-#zLudBi^tI$C%GS=^TYqNgmyZ(Zn9Usv*Z~gJN`m99iO0f2@fdB}A00@8p2!H?x zfB*=900@8p2&_y3mXE;o3+(*LPo9416Mys3gp~-tfJFft2!H?xfB*=900@8p2!H?x zfB*=rWCHd(0POvOv&TR7^yAg<|9kiaR`Mao*gyaTKmY_l00ck)1V8`;KmY_lz$Fl~ z_LyJbGY7t*|Hk*;_&NLnE+6cG00@8p2!H?xfB*=900@8p2!Oy!CJ?s<$NU1XeR@xK z^doOxgkNAKA99Qh1V8`;KmY_l00ck)1V8`;KmY_>0=%`y`~u>0mlt0A52f|+3%Go+ z0|Fob0w4eaAOHd&00JNY0w4eaE17`3E&=lkRQ~$)7oL1`QiNY%B_DE(4Fo^{1V8`; zKmY_l00ck)1V8`;Tms$J9`g%aed+Qm-@E0zSKt?L`CtbGKmY_l00ck)1V8`;KmY_l z00dSt0ow(@`~u%ORQ$qcUyD5pzrad9~X(Q#=@18;M3E_lhDZ+Cly{l0UYsB|Dw$3;DF$ z^L=l%+Y#A+=~Q3;Pa_**+arC&{;&5x)wizqOTC$%*LyDYY)m}WHJDKOFY}}E@5krk z-LZd0ieUo*5CDM>0)g5FUbttFTkO*-1#Mn0%qwc%)aP_l*9@hgl{8Z;D5~itZXC(V z>6|Q%Wsb_H#I_a0$&Bb#N_j0wbL)9wgh<@fO5)j4HGfViYWi%^Cv(G6GW}ITQvF#{ zc^xktCUU!5$u;$|W|(TZ=Jk2~QbOJGA&G9flwTVc?i%DCY9)m(F>leHA~6zDul#&8Pes$J-jeD$X%69^=!$9%ahUT$jGISjLSZ?7E=k4 zY33<*{QK>llhDkCh~51 z)P%Cso6&&U!*5~p3Yw9x>orraR($q^VYib))`%%Wi^c+4*Q#z=GgWeZP-))Cee%fh 
z)PqK~qL8y)FQizuBvHfH-c+|Tr=flZY$6*_&?1s^%rsJN9u+#B89O;GyQzCcrsWA~ zs3R_nB-&w zfB*=900@8p2!H?xfB*=9z=|bc#S2(-4)$6jWbp#~zxnDPy!vGIwB;8dK?2x700ck) z1V8`;KmY_l00ck)1V8`;RyG0K1;G3Q-%E^M`q0$t{}b~LuIz)3(SZO6fB*=900@8p z2!H?xfB*=9fJ0!7^~wAKr{`;Lh#fm(@C!gcKmY_l00ck)1V8`;KmY_l00cl_RS@X6 zM#=mFKl_&px8MHp-#rGuz^XX*7$gXQ00@8p2!H?xfB*=900@8p2-vd-Fu#EIOD{Yw z{l{y62EPCl0|Y<-1V8`;KmY_l00ck)1V8`;Rt14|)+m`@pnByGGjD%-{|Nj7tK!&W zkRSj8AOHd&00JNY0w4eaAOHd&u--zz`~rVj)s@C!gOKmY_l00ck)1V8`; zKmY_l00cl_RS?)-sgJn?|HN5O3${$GhI?b=bz^@@weg+$JfQ4j_r;<(fLkv zC-(w(hU<=eE;6%~7Y+?@i!r@Y(B}2Ryi(NEl37$rs$nYmqLx3W6=>2eBUw3}lf|*j zQTdeErl>fX5oz(1zXfS(ATA6IaOJQV!&J?NL2GPnCx906HBA$w(_7-g!2zxjZZuz1 z^@^f9{T^u7EGy<~m?cTNzzc^7m+o-eTBT5{>J?Kl7it>q_m*}70%d(t0_oDs<~E1G zE!L=)s3c)oNu+2iK_pqad~PEz3=D9O4V&uOlGAtE?k1<*T>8kk>>NKal@Q6M7sQ-= zDkq-Ej!mSqr^Vy)>AiMFzOJdJR#1pu=f)=Fsa$&EgqWGkiJ9r~aq*};nw}odi6hh5 ztenXyZV_6!Q7gRn$_1^Yg(cHw{lsXuS%*UT*1>C4fE|B3kJfRN&xR0et*#lhYQ@kL zQ!i^u*&t^|PCh1QX+6LCw;)6dRkV5MEc1%coEcRwHR_s@uQn4hn-F~@_EfL zuCuh;adHq;-%z!By;@hwq#<>d;-RgNPG;q?V;Sq%iz$Xk60`ECOb+gdJSD1zp_zu4 zmyq^siVOD)a1s0bG0vBW8C2AUNlqPNXPA*0rs}u3wt*M!8RQoG+_O#1oBEt?>YAZ= zhucfs*mNRzdAQ=6KY3r8~YU!O_J|xjim-1`l!d-*hL#;+h726ZFZYfdDSrFt& z6Q$XHUbvf#iEAaroVP8dCuZ$ZVw}?9@z6G;*)>ZGDyl}&6V$(qAggq^(~=~ zYTiC4$@RsPwPq<1C~4^_fcl zNgXWco_wA~cg;2xQCBZ0rRuD&HQVs|z6T7h$0}%YsTvw7y&xr|gS~O#*kC(z>nV#e z?CtkmOz^_4UEJa!Tj!Mgj4~nT(nr&|w12-Rpm+WpNAs-Pgmc;g!_a)IU=^5COASpt zGCq04zIzZwsn|{Kdy@WJPeZHE>Et@*v*0W#(R6~b9A9DU^kT_=WB))Gxlc@T-hCn~ zq)_EyU%T52@$4TfoN}z+d7c;2WEksQ>&Hqc)e9A^uBe3qx!{ruBpqB=dm(}Hfx!fF zq%-ljaA%TRXqFg~#kZH`lnh95(j+xTt{O?-RfF}N{2-ur)+{03UH~f^kiZh9=}z){ zAnE%(z*j||nyI??kmAUF^2qVjgGRNYkh5Jcq*%5j zQN!2XRJSsxp?(HzA{$W9B8F~4snF@n*vVXwknXH}ocRSl^?!C8cx~|iy=rA}{qID`KWrcX0w4eaAOHd&00JNY z0w4eaAOHd@jzDL$Bf>?=ur^skV}60jvw!)|6Tk9}A6R|?5+r~P1V8`;KmY_l00ck) z1V8`;KmY_lU}Y27Yz>e31-|;LfA{bki}4KR7g*T`9isyQ5C8!X009sH0T2KI5C8!X z00D=9V0|*bfccG2?);<2Z~F=S0?-c-009sH0T2KI5C8!X009sH0T5Uf1gv=jUBAF{ zU0?i@Jx71<0Q>^0;@D%5AOHd&00JNY0w4eaAOHd&00JN|U?E`f0#E$b5C7HAUO4iX z@C!gOKmY_l00ck)1V8`;KmY_l00cl_RS?)}jgt8VE~sN`ANtdibMOnSieryKf&d7B z00@8p2!H?xfB*=900@AM ztN;1efBv@%pSZ1$r1kO7NBV00uk}CEH_-cB@BKY*_FU=NmUyO1O6dIad?x-@{Nebz z*weAy(I-0JiSFcH;LdQ}kg`oki92R4ks@X88fUWHW&_ceZX@YcmOI$cOz%{~+=8LLcQFN!@1MQk+#e5C3 zBuN)|;V|LS9d28z6lztyVk+iBO{4wZ(oR62tWQcHU7Fe4<`B5W8ub#DBrGe56m2Dl zBukgiZRCZ40q(J3Q$1U9`cB*34hn-F~@_EfLuCuh;adHq;-%z!By;@hw zq#<>d;-RgNPG;q?V;Sq%iz$Xk60`ECOb+gdJSD1zp_zu4myjZn=u7tua1s0bG0vBC zQ&ZH2NlqPN$(R`%rs}u3wt*M!8RQoG+_O#1oBEt?>YAZ=hucfs*mNRzdAQ=6K zY3r8~YU!O_J|xjim-1`L^<&U?{qR~=sA78w>y{GboCQIiG*O!E=Y_k;n7CFf&3W5W zdScctCB`Wo9uI9pnq9NBprUFNJwg4;2(n5y3$jwBTHg}-sOIf+l3ZUrS!<`wm|oV(RiCN!pVYyE?#bs_bk}TC5q0%~ zQmW4STC)wW?|Z=DdaQybm#U$W(hE{TI@lW*jt#amx1O>n!`^=1#RM&Hqc)e9A^uBe3qx!{ruBpqB=dm(}Hfx!fFq%-ljaA%TRXqFg~#kZH` zlnh95(j+xTt{O?-RfF}N{N$i_)+{03UH~f^kiZh9=}z){AnE%(z*j||nyI??kmAUF^2qVjgGRNYkh5Jcq*%5jQN!2XRJSsxp?(HzA{$W9 zB8F~4snF@n*vVXW)(Wf&;MKfeu4EP zL%;h+*UtPF;sq=!*gyaTKmY_l00ck)1V8`;KmY_lV8s*IPDZ$$#S8rE@o#m0=4-vr zTJZw?U8zX_ulMg?^U9i!^}W$o>xm}5ohT=w-JkEycD>#87|F&40w4eaAOHd&unGw% z+gis3dJ!%(vgX#N2WT`s^w5+h`X8Ah@X!jg^q%@Ugq&x-2s~sFT!Dx?FT&KHEbZA8 z7p^-L3{i@r3lC9>c6g=xgqc3j{fCTFMBt$vV(G>#dBH&lJk*So6Lxmv0uRZp3}@ZW zFuy=?B>m)uk>2xDC&x(utvFMymr1 zFZv4ou5KQ2ImhLk5}%}a>nZ4Z*K*l1h@gBhk{*76@M_|7x9<+!bsP_oS&+()c z7SLn;FlT;&+R>fAH2>c}_hT!A>whOg{$T?F5C8!X009sH0T2KI5C8!X009tKaRfS} z9TCo&U%+~g!1W9K{Lq_2pZ((DYkgf}WJ4^m%zFg!KK`xPRZRGjI(_J_33EFzUtVc3v1G;s@J^w`YlT z9*zcD*|AIq{1uzK;LnvlIi2K%-MhIfx;-gUKuq9^7?>U@m~|7*25MGK?>Ohx8ky~_ zKqjqBkJCreH2c6EMP;s9YLFQ`gV|?wGf!r#RBBp1uT@MkBc@WFVOm34^R;A$7?7zt 
z8}+(Y$&)!N*;Hs$kdsa4$ZVNKbum=Ue+*m9C6|!yzBMjP>~1%GQQ+_?!-21Zk+K%gO1UG00@8p2!H?xfB*=900@8p2!Mb?V2Aa| z;sw6*%%43v`a8J;@C!gcKmY_l00ck)1V8`;KmY_l00cl_RS>Y&7PyW11(erc-Sm$> z{(C=wUtm=ndkhi;KmY_l00ck)1V8`;KmY_l00iuH1({#qx4-<+?caOiQ~z#>bqeDI zS;~janz!)Xu2UGtI)w)ZxQ08In6sEOTWyej9cY?4EL6-l#SyD1VVy#(Q%I-Fan`(U zvFf2ak&d-otWs_@T^XB1D>pVFPvz1RC&bKTPRvY?J9Dm8w0Tpp3wbk|QL!_sUTW0I zBx==0#blGDxwXZk@@RT`JSTqG>0~3H*9_x2OS>Ka1+FCgt|q6#bU~Od=z}y}5Y{Qg zI)w#hbg6|cYvroFrXu}%wp7iZQ;M2CTXd~6&2iTb_bj?=wrPpFdO;~wXML^NOSt>K zk7Aud&D08tYO;0p5=}R8EGIPeEKBy!I*WA*u}-1Are!oRp)XUFoAFK;>lEIU>l8kh z@BGZCPBboJokE}I1G^vq0w4eaAOHd&00JNY0w4eaAh6;H*y|Kp&-nxE6bj!j{n&i& zukWzdDcr)nc%ADMc6_w+mFUaSW26GM_np9E*DhW-zK>hn<;Ek-Xlh-Vt!u=UZKwHs z)#0pElY={4!dKafSO{nP9o`E&c_B@DvaWqk=G0O{3%9X*`A*pNTFSR`q{Y~6WQ8{( z+gsr+EJbl2k%V(@UtYdjw`CvPTIPN7-c(sa3Wzy#JK^o2JD z(y>mV8$~xVoz2RboZ^P*(LfDn9a<8G(Bk^HT;+mRa$*>~%CuScC^YJ&R##m&-(09^ ztqQOxRXdNii6})e5H!OONn}~tQJEav5qV0aE6%3eyo7|nLj)dvkOB|MkJz2o4_Wpe zLH+nskN?}Rt{uTTh41^%dvpK+ zz8@EK00ck)1V8`;KmY_l00ck)1V8`;ZUlkdmJsF_SlIBymF>?w`vv#~ZUlPB009sH z0T2KI5C8!X009sH0T2Lz_nm;X=3t8X1^T~jY+duOH~c320`L2AK?gtp1V8`;KmY_l z00ck)1V8`;K;T9Y;4C4`FOW6<%TK??M}9PQa$Gkxai&@?lSxnyM%G56(a61`NQ(00 zZ;1S{&6>B+TGxPWc6;ngn@>Kn|I(=#$>jR4Sw*(@U+Wv_eXjTZo;Q20^lVE!(Bw1*Pkp+4a9|^0j?YtL+4$k0=Bjj;7ra+CheyMQ#3(3y(KOj9N-$^M%e_V zwDdr`W?3;`pIMTm3%qccaOn=W9hh8}3b;w85x#tGBQFdLaE}e!Gy2oM)BiU)h;r#8 z|r)i(*Uy^OX})(k_Pr4uL9^6zD4 z;60n-!aW1+7BHY{rwr52&6q|Q(+Gc%rV+07@xncW++v?={nfmw&*^p_)qK86R_svp zf|t0a=^EqZDP9HZO5dDl$c{~9OMXA_LZatI!TTiy2ylos>lDtZDk?6!#s)nJOyqOpM1wW?dzOx3-I6i4opM~nG{S4SdHlUzIBsaco{)w* z;=)L>ozZ0Kp&Vd)okP*;$7x=3~&u3pc-1ASZ^$58BcOv8; zHV^;-5C8!X009sH0T2KI5C8!X0D%=ppflPL;jBmYNo#1#FYv>!-2YEkeu0hA z7b1PN{@41SS@v1}3xQ|(_ip8tTZrdwz~{Mh9<)`odFK&5P1x4tSZgY1IoD`n%Bw9Q zUO@N9IJkbO)gCW^-~!PNK4|x(iYbflK~H;+D0yT+pV3>-la#W-o~}@*ra!jETOK*& zG4)PcuL!L^qw1wbT~q7_nXF~Eb~s$XXyl16-nh=vZpU56FqQca8C-yP0mKU+UI6g| z9Pws0SbB)**}C!*b-@6L9;-St>kD&EJx6}uWs zbbPe)mFUaSW88~tu5b@=A6e7e_hetJ_mb5XskX|u!>PD%a!WhW>%GwZkT#y z)=& z6%_K;T5fDYp30>sPPm8Ibr6kAXR~r9r?^F^8ad;CZOhx8j0R6 zkY6}y_AAiNK5$1-nX8r>#Az1HKC7GgD!I_tw0d5vnB)x?r8>j3hP38u$qtbw-myl# zu2u577IMBh+2kZ4;fBn6M!V=_QC$q>`Hx`>s0r!rTjRpS?sn%!;P5HKfv5|6V~$2_65xmnh)@L&+>UVdFkR)Q^i763L4jnrabpo#ED- zc%CNtlcjg{iWs?%y@rVo;?I}nKSXY&NngBM z>5x3dSSX*ZmO`ofmhA;i4B&#)@{GJU--n*=1TCJ62nj<}O;dWlwvxj?h zmwn?A5)k?-Ir8Y8b13s>ym5B$w0KXV-OCuxp=?pZuS%_6tZvTcVIFF+<0zy<;!00JNY0w4eaAOHd&00JNY0wA!m34F*J9(#}Avww5vr5*qH zovT=nU}Yb4j1B}q00ck)1V8`;KmY_l00ck)1RMf;tWV|_c)5Secfa%GSN|A(0q6$^ zfB*=900@8p2!H?xfB*=900^uK0(-4dGQYq(UnoD-yXN5+;TKpH#~yIWH(&e)`~s`u*kh0&00JNY0w4eaAOHd&00JNY0w6F%5DYQDz_wfd m_v3{>|Jr`|1)vxp00JNY0w4eaAOHd&00JNY0wA#R3H*PQE_3++ literal 0 HcmV?d00001 diff --git a/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite b/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..0b58f0dca7d6ce95907cb0b322dc4802f04688c0 GIT binary patch literal 4096 zcmWFz^vNtqRY=P(%1ta$FlG>7U}9o$P*7lCU|@t|AVoG{WYDXN;00+HAlr;ljiVtj n8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*O6ovo**AfQj literal 0 HcmV?d00001 diff --git a/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-shm b/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-shm new 
file mode 100644 index 0000000000000000000000000000000000000000..3721d2ecdb834451d55e84f6355a98bc936d79ff GIT binary patch literal 32768 zcmeI*IZgvn5QJgdYzBj|%{J^?3K0?afP{=2ki!)aCm`hndJ^5>3!zoSm5lj@W@tatot zU(x$jbxrkeud5sCrn;qic7gx`2q1s}0tg_000IagfB*srAbc0vRV1+l4q}bd;56PSN@59kYb_}e=xTfJXuR>Zy-eMkg>oC3Z9j3AIxz<1vf1ab=awnu_M iP66L@N)X5?;2VAk0yzchd$n^;&(ygEe49Hd5%>hfhAAlk literal 0 HcmV?d00001 diff --git a/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-wal b/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-wal new file mode 100644 index 0000000000000000000000000000000000000000..eea540a7b95cbb6e1a17f3865d835c461550c85e GIT binary patch literal 144232 zcmeI5eQYG>UB~zR#oqPaYc`wonhD{kC+EiZlH*d2BwqW*m*v*$yY-r!3(7Fw9p7!d zZ`m0;_BEuq7hO0+dqk9=76=7JiiDzyq7wBaIIgHcg(wOy6;dRi1wpH-1qG3+M*<4Z zGtWG`&&=$)zVJ`H_Z@k6c7D(A`906`d0y7@Xy^Ak_Q}Ynd>{Rn+kL(rK6)Gp{LUwC z|K>NwPCpZ#K05IyK_c>t^8Ba%eeQoAcqKb`CSNsmX{p(+Yo>IB zdF&%h&LMt5&Xzsly4S~m@4lzcCQ~lq-~!lS)xKUChm7iwn{zWnrwNu&UKobZIg_J86q#H4;f5%lLn>+NkJj)ykTx z8HR2eYDKT6?XyU^IEl`Rx2z= zq@Q`NCK%@;c`g)5k7xXKw`pU&zSyiW!{L60nUY|Z3FM1;e>9!R_%9h&e=sZTJ>cjA zCbQ{RD^f{0Tk=vbw{?wiluEf7Wv-N+IV}}tOHyGzpO;Q3)7klaNt&837L`It?WnLW zTWM9^f97r4`t5|MU8|U=$a+8c4D>o}^zK}aJuZ;2{$0WNmhvWmn zNc!Q7-)D^x_c0$Wf%#FXiN0Kd1V_xYXu0_FPkj}7nm zqn}PvA=^rzpKyQx2!H?xfB*=900@8p2!H?xfB*;#CIPxf(AgJQ%6$L3FW#6e4pAZY z1t=wm19W~b@YVmST>a%YgMWd2fes&h009sH0T2KI5C8!X z009sH0T2LzK_(EPjpM%;_`Uyr@y*qp_g1hkFvwkwwt)Z$fB*=900@8p2!H?xfB*=9 zK!-q-e&YKApUvL*%Wqr`ejfV*9X|K~0w4eaAOHd&00JNY0w4eaAOHe`On}BEi1B@a zAAa?VSJqQM{49+};7`5lBY!wR00ck)1V8`;KmY_l00ck)1V8`;29H265VT?y`f1bn zzCf%Ho%_!JW;1hV@+2stwA5_ZHB&l5LK6f6z6nVpsx*GV2zl{?{9fhy4PP=g?MuFqdM))r@Bz+*`*WJamwZ<|tpXg_vst9K3rMxiY3V&|bnuWQpR@mimKa*TdFv~^q zGmplsNadH~R-|%fiM_{ddn{T!lWoUH<-D!E0)< z>biRP{_`&7y4GISO|`7GTnXB$R;_7^HC;7UTCLi;V9acqTFoo2YinL%OK+F;1}T69 zJ3ptZht3OjTxq0o{S%LgEwsj1u251=Dn-_2arAa0y?0{FGo|U-qLMpVkWMKJ(kQ1Y zlQO3j5_o<}nUgG`F+78e^QVisnQU=^)|fScDy(X?6^~^AjBAsJ4g>G(nA?%vKc0L|!o?S-zndfSPaW0bQLXq@%#$WF&HMVIM zyq;Zz`x$0Rf>}G{{l&cBS`{uCR(~)n>^%CS48%aqyTXGHmie9eP zwVKfxU4>bqGM~>&Q;#T9r^xq=o&L~+(m|PxH{MOXN+M~EhFg8&GC00@8p2!H?xfB*=900@Ay40rDdO93TJ!AOHd&00JNY0w4eaAOHd&00M(ez;ZFje=o3DZ#+D4?>A4w z@8Do}I$8$;AOHd&00JNY0w4eaAOHd&00K6FVS48K0w@3aYft{%m0!AqeF2OQ5C8!X z009sH0T2KI5C8!X009uVB?u&FEBU^_+mHOC`lX+_+r+-WEz$RABnW^22!H?xfB*=9 z00@8p2!H?xSgr*4zQFe*!6)ynJ@^jx1u!r`00ck)1V8`;KmY_l00ck)1VG@HAV6ad zj_`c}-+%o4!(Xqb*03*dOY}V&2?8Jh0w4eaAOHd&00JNY0w4eaeu{wa3v9giz#kqT zj_#W~ldqb(wA5_ZHB&l5LK6f6z6nVpsv+{)Ltgx#`wCRX56d3E=yMMF?tA)dGIhhZ zJG9T2d?WQ*>V@Qmnq^iXF0CgtTi5RpOa@*M_Ic z7h5CI^!T{{*;F^Cxmc73N6Hb?*ps{+!y9uLy`l>{x~-Rjm05@--i>}P2)mKb1>w!d z5$6?k#1XfIRvd9hXk)HBCOMiwf{kZ)MAGx)exEh+j3;X3%Usn~OcE%$qOYkepD-iU z!yeJjZ0hDru+Zh=xZ0s6U2()&JA^pmOfb@Wtc#0CXNVi()L%+O)5kOZjj(fu8LG&g zm67aMhNvo(B8ugi;Yj*G#;?1Jp@HL>`9weSR&05`gc+msQeGHxg+I5surSy5VqgsS zGs)G2+H#Tn%%d?YQu*b0cdi@EeB8EI%1-7Lwxk-{aVqn+_MNHfict~fpw)VH(JTwH z?hJM*cW1FHL0i?THEpq`tHw&JRa+N~nN3rxdBt^Y%`0r_?Xuo5RT}L4oUR@^FW7OV z9ln2)#A9L$tudA>l$4W7ku4D7=#@&j8D*}NojL6a;66QDRB|T^(kW#@8s$_S>y9O~ z)*TWwJ-%5SbbAD|MZg$(DjH4i-|yczZjC~j>sl411+v%=m3jy_;009sH0T2KI5C8!X009sH0T38C0--?A=MRu(rD)UmzQEhZetgfBFHIb`oDS?5 zxJIKvAOHd&00JNY0w4eaAOHd&00JNY0!{+f-9O(K`1==s^%vgU@s<7XFW`iPD-Zwy z5C8!X009sH0T2KI5C8!X7*GN`X}kEoK>FI1+H=0Aj$vP5Kzkd_0s#;J0T2KI5C8!X z009sH0T2KICxKn`lJ5(A_%|QEp?u|A&tYG{2?$urDy6y^Usp00@8p2!H?xfB*=900@8p2!Mc-0F61ghwlsA 
z`OX8s_T_s%vJd+LPDr={0T2KI5C8!X009sH0T2KI5CDMzCE%y+;`;)>@$vHX-#q`& zzs{Y>laR;KQnOvxOzDVkhc6KDO-K?^4U^YC^5O^GR|xS7a<=RV*S$W5efK?mmKyS> z-lgfcr`}CoNZysWmY9fNP3(?87Ei@qicN=Kj|HRG!zV&7hxP@(9sG7LQ;>lG2)sW6 zNB2h3xqJFWIbW&i*d0yJjQcm_PLN#|BU+4z-i`L_N!#NIzw1#E zLJiYsyqriiiD&Lz8=fj(Y>h_$Epgf|~YoLAHlN8A!xal{>=jk)fao z(avn@=1j2A<>I*7p(b5%#92Fp$;|{Ky~nyCD0YUpAx{0JL^ORok zYXaN(c&vGL8R=)9s|m)rNS+Ht(&HI_y|dKVrdjZMb`kDpm?;Tn?U45u^L}epxMW!U z!K|?NfTItXY&)(B4Xv*CS`lm{CFN|%HT)}jxmwq1MrU*tW{JvtJ}*r@qD-A4-!pdl zLk~&^Wj5Y;H}xusq%|6PtyhwTRW;XJJ*!x5R>@jWUT&(@ij~{(CC`f-lkCnUQ!%*N zRAhaq9%kpzfxs4&4>9E+yN^hZkaLWF=KBIa@%ZnLe(tvCRT`(zpL)kf{&0W*2!H?x zfB*=900@8p2!H?xfB*;#9Dz_E==0P4fxWb8d|%)Pv*ADZ=dswmbYFn{NB{>2fB*=9 z00@8p2!H?xfB*=900@AR4-hzLD!R~al4g^2|1V8`;KmY_l z00ck)1V8`;YyxR|=KBJBkN)_0{e2n-$p_!rRI zMzz^6;9o!&9}I_o0sg2Vd(zK(cvg52xchXnAnZPwEC_G@IbZfPbhYAm&X+yCJ4&RE zM-REK*S|pOhVK&m3$!YlsaF_0_!oeGfufSlvkBCl!7euf-C68PfPVq_7pUoEIZ-te z{snZpuo4nId{;;(l@ z)c>b{0n4udx!1X!-tX{zf%o2f{?*HG{d$2uN011GeW^c9T^RY#k%i=6MP7+~G7=A8 z3`awsAPG1?00ah`fO$tWoxR7uv9sd?NANxk!eE;T!mzzetX$a-tv9MbTy&kLV8a{3` z8>&TplvROh^6Js*6CZiawyD;ei&f$>?@6sj?rpZ-_@2345QI|nHB)cUjn?;DkjKC# zWwpzP58iZ3h}S=o%Vo7oyP_NAcC}^FCg=@ebcqSe%{~c_YeGWTXtP~CN4~Sp=?&c= zu*@oTkj7mBvVqc7=JRVSJud3{{4u_k*> zxU{@x$g1+AY=|me1GRF6bVa2_ZhFXFDD@u4yut}1UsT}+&b3?RAC=Fc!!y#_Y$2~( zjc4!j*VkILW?OIfsWeaaX4Dp&EB%^krSUApEBp#uA-znjG?{pyg_CHV{*S}9&;?h! zqfS@79~dXm9}JJdPLX4E8M_S_c9k00JNY0w4eaAOHd& z00JNY0ycp==$Y>eTs?5#AI}wz{TTKIFg`#41V8`;KmY_l00ck)1V8`;K;V`ja3^gg z-xpB6@K52TE3fRpzQ8Tf_h=*tfB*=900@8p2!H?xfB*=900?}DBH;T1KlxLy{@eGe zZ~rXz1u!r`00ck)1V8`;KmY_l00ck)1VG@HAh3_Lav$Fp7{2o5`A>iBJ0G;spK=STt|e literal 0 HcmV?d00001 diff --git a/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite b/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..0b58f0dca7d6ce95907cb0b322dc4802f04688c0 GIT binary patch literal 4096 zcmWFz^vNtqRY=P(%1ta$FlG>7U}9o$P*7lCU|@t|AVoG{WYDXN;00+HAlr;ljiVtj n8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*O6ovo**AfQj literal 0 HcmV?d00001 diff --git a/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm b/internal/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm new file mode 100644 index 0000000000000000000000000000000000000000..52700bd09d4c73843906863553342fae94c0e136 GIT binary patch literal 32768 zcmeI*O)kSx7{%e%H}z4pw5a}lju=UdnV5)?kuVY~unMshW)@&EmSMs-<=Ssc4J3o6kxIgLLoW5Tk9Y4Na-CjIDbzeHaJg=`B zWJ8Z1Kl1%{&F?$@{SkFk9aG2E33XDPQm54!wW0c~pJ&xMbzWUio9d!E{Cr8Tm(`Zq zR#((jwWAKtboF{oU03~Idg`XyS1XkufB*srAb3iPm7}L&|B+{09MV@*pV!-nT+x2@Ln?#F{F{G~P5sVhMO_6G;*9jx7>P zpo%q4#%a7=j>Hn!iZxEgX}njF#1hz!HBQE9yit_I64;3~PR41xOP9nF*o`$##%a7o zn#2;=i#1NhX}mw3#1innev%^4&|LxT!EXPQ)0{ydl7R2XAPAHx;M-vc0%Z#LUL=A* znF78Mj37{^fbYN~2$U({+aC!6WeWJdQ-VO50>0^&AW)`&@3NvbF25ts9bH>#{#GY)O`8Ef$~yGPpyr zG(UiL_uk!mKSZ7&2S`ve--GG9d*AnYo_jy<`@DDe`t>~rJ|2BIbjSC1g+hZNx~1>S zzB{+?uHSqyBkvhHayDZmao+UL^)$aW{^b9hpF5G)bd@aC8_SAL_8HRQaOf~0M%l3O zOHvto>ofMf?8Mlc`aI{I4#S~4A6ytny&X!#PlfvLPCb`;D*1ZyQZk-+CLzXOH4?Fb z00@8p2!H?xfIv49xRi+TyLWS!59!KcRh3Ii@{A~Dr?OJkdz_psidjh{Qg$LQ5^oO4 zB#6D8Q`bnYAc;rCA~{~n&18!w$+UQK)G2UYskT%yk)NHglY~k<#ve*^mm^xOtX|N{ z7i6VWs<&!-Q!cAjRaeWhqO+ugeG$3Bl(;|w3X|CaVFDRfhfwJYv{b6s^hUi}RU1q& 
z9&E`i?6c%13k$I*Kc41R+pAPn7S$@V(HCsPEay{irU_0pxt7rqG&Z)bBslUjj^IWJ1kK+-5cfir@6Iu6H2+< zP@7Fwok*|+yQI&6ohDq7xEP;Kb5C3}yExOv_F-GA)wNlw-L17Uk;Db5y*t~ajowM5 z+>AIUWoM3)!mLCJ^Z7iP5|3o(^AeeyFBZjuBs)bcUudnAX*0b)VYg4amMyil*7Bm- za9ogGjg{F{S6gbWq_$-{7rVzQ$WWT~W;)1xA$MY4v`VbB7OPrGrta=9GRLJDmJ|d_mbJQH(#rGoyteDK}+1l7TR0#NH#ww603rKkz9df_Kc3R3L2Sa-4(sw*t9xVqFozp?=#n=&6Q}^ zX7_J8)(2mmRGjzJ)hpsCObBB=QGPPbg-o|?u2zj#bXjTXb@SXE#`iLh>KzMjN?@D` z^cc5U`aVxL`kwF17fk)v?X~9NH@-7K6RoKPdV~!GKmY_l00ck)1V8`;KmY_l00cmw zI|*X{vQDL z{|j!v_2^q)IgxuB^8(%6pidi z%a8sM^8yYZ9Do1_fB*=900@8p2!H?xfB*=9KsOVJ(#o-Uf!`i^GMxwUZ9(s9CZT$5C8!X009sH0T2KI5C8!X z00D~OZj$rU z14bg3dM{-B!3F{#00JNY0w4eaAOHd&00JNY0wBJoq8_yRPy!YrDQzuOhSym8b1}AjNjIGHTrV2ZWO=<0w4eaAn?Hx&D8MK2waNf2Yx%0vbWl4Ob^LdF(&KHYfL6V&!mT+sO{QiaAW^*lDYHb|m1+z`A0009sH0T2KI5C8!X009sH0rM*YY+j&n z<!f1V_DJ3 zJ|i?iI2<}mh*7lH`0X-&Y@_Q9Xd2s0ee5J)-y6E~!G(d;+aW&uVyOS_)N`q)lCLK( zCF6-_5@P(-_^H@r{Ine;U{*-BWBammKG=#UGB7KhVUdCF-4W&YrMZ?9 zd6<=ut$4BQbZ4-7JFu(~bea{;SV_Xe%R^?^;YaT6OIx~gJ8s?-M&$HSq50%$BuT4ZQGQrem?TNhZ>HnV8p|-B($@GZONY|bmuh&#kG5(-om2s5M_Swd&CAVC1U*U-Q49vX2+0AOY)2;Wv8-I)_a`v zbq#Nh-8YD_(SYW;f+QXli{yAQHVYBnI6A5}sh){{IzZbIT z)gE7ENr|@p&vHD4eIe{=ByU|U8;vJa`T{MLoS5iLUp&~7Ti9pGO%@hnQGPtltvbU8 z(<+#R2z;97G@<?AhpPj(L~7mmT$v zg*PQI&IEdl7pp$|009sH0T2KI5C8!X009sH zfi5Pnoz{)b3w-l;9{qar7v_5~FVMv;j;etG2!H?xfB*=900@8p2!H?xfPhP22R&r- z0!Ke}@YHu#ZhZsu0xlpNfdB}A00@8p2!H?xfB*=900@9U7Zcb?>&E5gT#(4|Vs0i|JV~a-lcQ!vsi7*mT9yq2Qf@|^ld?0% zNnutZh53A*Oo>Ob^LdF(&KHYfL6V&!mT+sO{QiZ@YE@lt*-~3kYUQeGT`;>8E2rMj z&SHX_mCh?kTy#l(=AYj*f-N zY=JO=jH^R9y)(-1PjhSShU`XQLVU0l)?8i_cA9YSjwrt`&9&O~TOEU`-x;jl?m=Gd zR+8}W@=%OVr@2S(Fx`jgV*9N=7hcgoH4*J<3m}gE~9|Ebn7SJI1zcN?gBx138)%(18+v8SMwYC` z_gG48Xvy^?k4VOLrbmQij2{`{t_+*b{C5gVr`^mQTv%UQ>F9@gL#Z`a>J5F}oiHKnABgg~kswF( zG>g2~bFyWB|7OL;Jz@iw5;1=FZtn6Svt!7mC3!}avQt?p>pf2Tx`sE$?i=*c<(#@k zas^2|Di+%xUuXg24&748M1FR{e4-&lsKnRb3zbr--l`e*R`))}k`itGpXGQ8Q^#h> zKCkWzv}8Oo=#6@{sy3KlJlK+3*k{R278YVremu>sI>QIkDp!?7waU)=f^C@PeCo{< zVKr)uhsKkGJ03D+@?y1KIwu5i!*I(AGWnx&*Cig zdK~gs0v;kcmxQKG4?w$xflZOe9E9rjp58AItC%X|alN^7yIm1OGf{z7dJ<8Ddz zK+am-FKJo!^}M$0v6yT9DnT714bH`dN*YJ z!3F{#00JNY0w4eaAOHd&00JNY0wB;t_QEW@yLno!TE8T^}~U_Zd_x(tuy(2N#c^?`rQq zBOXDMeG`~=Z}y#d_Az_JBe;f$Anxb2-EbmSuiwBSI()3ahpYeDEskcKl#3S&87+@c$Mmz$N zAOHd&00JNY0w4eaAOHd&00JP;Z3J$i6=LfJ{@~|s`1ZlizxsLbw)llk%)TdIJ zEq}e`SpUEG|6%|6elhujf_zhF15LHr)ao=QyJg_!AOmhej{!GP_&}B)-hM-r z|6mrdty?^4Ro?o0*caU0=AKE=e}H6b`;HfLGuh%vGA*8T)?+BTT9y^vT^ZFL5PWjJ zSQHDAEahgzIVn4H+!AiBl;6Lw+s3YCOKtG#k>C|q8?56hvC6X;@xDb|AMV1&3LoPS zrMb%yXR#|=*XLPs!IE~hb!nF4DePMt=rR>f?~L;M)7)CSAsZFb#RpqqP(SqLhhy6} zik{A{T;9iyc8qP?l)zpluzhUnrUb^Bz_y2%hhls>%{_XDPUFe5bhh8>^F5Y&J+9qi zjV_4`k~0znJEqT>g8lQFS48TAF+MxOT^@2euxCx2r~i9WhuV6wC(|QBUB;7>@pw%) zE= 0), + chain_id TEXT NOT NULL, + channel TEXT NOT NULL, + asset_type TEXT NOT NULL, + coingecko_id TEXT, + UNIQUE(chain_id, symbol) +); + +CREATE INDEX idx_assets_symbol ON assets(symbol); +CREATE INDEX idx_assets_chain_id ON assets(chain_id); +CREATE INDEX idx_assets_deleted_at ON assets(deleted_at); + + diff --git a/internal/migrations/006_prices_table.down.sql b/internal/migrations/006_prices_table.down.sql new file mode 100644 index 0000000..3bc3a94 --- /dev/null +++ b/internal/migrations/006_prices_table.down.sql @@ -0,0 +1 @@ +DROP TABLE prices; diff --git a/internal/migrations/006_prices_table.up.sql b/internal/migrations/006_prices_table.up.sql new file mode 100644 index 0000000..9333756 --- /dev/null +++ b/internal/migrations/006_prices_table.up.sql @@ -0,0 +1,28 @@ +-- Prices entity based on the Alternative.me API for crypto 
prices +CREATE TABLE prices ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + asset_id TEXT NOT NULL, + price_usd REAL, + price_btc REAL, + volume_24h_usd REAL, + market_cap_usd REAL, + available_supply REAL, + total_supply REAL, + max_supply REAL, + percent_change_1h REAL, + percent_change_24h REAL, + percent_change_7d REAL, + rank INTEGER, + last_updated TIMESTAMP NOT NULL, + FOREIGN KEY (asset_id) REFERENCES assets(id) +); + +CREATE INDEX idx_prices_asset_id ON prices(asset_id); +CREATE INDEX idx_prices_rank ON prices(rank); +CREATE INDEX idx_prices_last_updated ON prices(last_updated); +CREATE INDEX idx_prices_deleted_at ON prices(deleted_at); + + diff --git a/internal/migrations/007_price_conversions_table.down.sql b/internal/migrations/007_price_conversions_table.down.sql new file mode 100644 index 0000000..926f7d5 --- /dev/null +++ b/internal/migrations/007_price_conversions_table.down.sql @@ -0,0 +1 @@ +DROP TABLE price_conversions; diff --git a/internal/migrations/007_price_conversions_table.up.sql b/internal/migrations/007_price_conversions_table.up.sql new file mode 100644 index 0000000..3310f1b --- /dev/null +++ b/internal/migrations/007_price_conversions_table.up.sql @@ -0,0 +1,21 @@ +-- Currency conversion rates for crypto prices +CREATE TABLE price_conversions ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + price_id TEXT NOT NULL, + currency_code TEXT NOT NULL, + price REAL, + volume_24h REAL, + market_cap REAL, + last_updated TIMESTAMP NOT NULL, + FOREIGN KEY (price_id) REFERENCES prices(id), + UNIQUE(price_id, currency_code) +); + +CREATE INDEX idx_price_conversions_price_id ON price_conversions(price_id); +CREATE INDEX idx_price_conversions_currency_code ON price_conversions(currency_code); +CREATE INDEX idx_price_conversions_deleted_at ON price_conversions(deleted_at); + + diff --git a/internal/migrations/008_blockchains_table.down.sql b/internal/migrations/008_blockchains_table.down.sql new file mode 100644 index 0000000..922ed95 --- /dev/null +++ b/internal/migrations/008_blockchains_table.down.sql @@ -0,0 +1 @@ +DROP TABLE blockchains; diff --git a/internal/migrations/008_blockchains_table.up.sql b/internal/migrations/008_blockchains_table.up.sql new file mode 100644 index 0000000..0e58a5c --- /dev/null +++ b/internal/migrations/008_blockchains_table.up.sql @@ -0,0 +1,71 @@ +-- Blockchains table to store chain configuration parameters +CREATE TABLE blockchains ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + + -- Basic chain information + chain_name TEXT NOT NULL, + chain_id_cosmos TEXT, + chain_id_evm TEXT, + api_name TEXT, + bech_account_prefix TEXT, + bech_validator_prefix TEXT, + + -- Chain assets + main_asset_symbol TEXT, + main_asset_denom TEXT, + staking_asset_symbol TEXT, + staking_asset_denom TEXT, + is_stake_enabled BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_stake_enabled IN (0,1)), + + -- Chain images + chain_image TEXT, + main_asset_image TEXT, + staking_asset_image TEXT, + + -- Chain types and features + chain_type TEXT NOT NULL CHECK(json_valid(chain_type)), + is_support_mobile_wallet BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_mobile_wallet IN (0,1)), + is_support_extension_wallet BOOLEAN NOT NULL 
DEFAULT FALSE CHECK(is_support_extension_wallet IN (0,1)), + is_support_erc20 BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_erc20 IN (0,1)), + + -- Descriptions in multiple languages + description_en TEXT, + description_ko TEXT, + description_ja TEXT, + + -- Genesis information + origin_genesis_time TIMESTAMP, + + -- Account types configuration + account_type TEXT NOT NULL CHECK(json_valid(account_type)), + + -- BTC staking specific + btc_staking TEXT CHECK(json_valid(btc_staking)), + + -- Cosmos fee information + cosmos_fee_info TEXT CHECK(json_valid(cosmos_fee_info)), + + -- EVM fee information + evm_fee_info TEXT CHECK(json_valid(evm_fee_info)), + + -- Endpoints + lcd_endpoint TEXT CHECK(json_valid(lcd_endpoint)), + grpc_endpoint TEXT CHECK(json_valid(grpc_endpoint)), + evm_rpc_endpoint TEXT CHECK(json_valid(evm_rpc_endpoint)), + + -- Explorer information + explorer TEXT CHECK(json_valid(explorer)), + + -- Social and documentation links + about TEXT CHECK(json_valid(about)), + forum TEXT CHECK(json_valid(forum)) +); + +CREATE INDEX idx_blockchains_chain_name ON blockchains(chain_name); +CREATE INDEX idx_blockchains_chain_id_cosmos ON blockchains(chain_id_cosmos); +CREATE INDEX idx_blockchains_chain_id_evm ON blockchains(chain_id_evm); +CREATE INDEX idx_blockchains_main_asset_symbol ON blockchains(main_asset_symbol); +CREATE INDEX idx_blockchains_deleted_at ON blockchains(deleted_at); diff --git a/internal/migrations/009_services_table.down.sql b/internal/migrations/009_services_table.down.sql new file mode 100644 index 0000000..4dd8073 --- /dev/null +++ b/internal/migrations/009_services_table.down.sql @@ -0,0 +1 @@ +DROP TABLE services; diff --git a/internal/migrations/009_services_table.up.sql b/internal/migrations/009_services_table.up.sql new file mode 100644 index 0000000..459f653 --- /dev/null +++ b/internal/migrations/009_services_table.up.sql @@ -0,0 +1,24 @@ +-- Service for Service Records sourced on chain +CREATE TABLE services ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + name TEXT NOT NULL, + description TEXT, + chain_id TEXT NOT NULL, + address TEXT NOT NULL, + owner_address TEXT NOT NULL, + metadata TEXT CHECK(json_valid(metadata)), + status TEXT NOT NULL, + block_height INTEGER NOT NULL, + FOREIGN KEY (chain_id) REFERENCES assets(chain_id), + UNIQUE(chain_id, address) +); + +CREATE INDEX idx_services_name ON services(name); +CREATE INDEX idx_services_chain_id ON services(chain_id); +CREATE INDEX idx_services_address ON services(address); +CREATE INDEX idx_services_owner_address ON services(owner_address); +CREATE INDEX idx_services_status ON services(status); +CREATE INDEX idx_services_deleted_at ON services(deleted_at); diff --git a/internal/migrations/010_activities_table.down.sql b/internal/migrations/010_activities_table.down.sql new file mode 100644 index 0000000..c0537ec --- /dev/null +++ b/internal/migrations/010_activities_table.down.sql @@ -0,0 +1 @@ +DROP TABLE activities; diff --git a/internal/migrations/010_activities_table.up.sql b/internal/migrations/010_activities_table.up.sql new file mode 100644 index 0000000..50d5757 --- /dev/null +++ b/internal/migrations/010_activities_table.up.sql @@ -0,0 +1,32 @@ + +-- Activity table for basic transaction broadcast activity +CREATE TABLE activities ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + 
deleted_at TIMESTAMP, + account_id TEXT NOT NULL, + tx_hash TEXT, + tx_type TEXT NOT NULL, + status TEXT NOT NULL, + amount TEXT, + fee TEXT, + gas_used INTEGER, + gas_wanted INTEGER, + memo TEXT, + block_height INTEGER, + timestamp TIMESTAMP NOT NULL, + raw_log TEXT, + error TEXT, + FOREIGN KEY (account_id) REFERENCES accounts(id) +); + +CREATE INDEX idx_activities_account_id ON activities(account_id); +CREATE INDEX idx_activities_tx_hash ON activities(tx_hash); +CREATE INDEX idx_activities_tx_type ON activities(tx_type); +CREATE INDEX idx_activities_status ON activities(status); +CREATE INDEX idx_activities_timestamp ON activities(timestamp); +CREATE INDEX idx_activities_block_height ON activities(block_height); +CREATE INDEX idx_activities_deleted_at ON activities(deleted_at); + + diff --git a/internal/migrations/011_health_table.down.sql b/internal/migrations/011_health_table.down.sql new file mode 100644 index 0000000..7f234e7 --- /dev/null +++ b/internal/migrations/011_health_table.down.sql @@ -0,0 +1 @@ +DROP TABLE health; diff --git a/internal/migrations/011_health_table.up.sql b/internal/migrations/011_health_table.up.sql new file mode 100644 index 0000000..47b30ce --- /dev/null +++ b/internal/migrations/011_health_table.up.sql @@ -0,0 +1,28 @@ +-- Health table for scheduled checks for API endpoints +CREATE TABLE health ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + endpoint_url TEXT NOT NULL, + endpoint_type TEXT NOT NULL, + chain_id TEXT, + status TEXT NOT NULL, + response_time_ms INTEGER, + last_checked TIMESTAMP NOT NULL, + next_check TIMESTAMP, + failure_count INTEGER NOT NULL DEFAULT 0, + success_count INTEGER NOT NULL DEFAULT 0, + response_data TEXT, + error_message TEXT, + FOREIGN KEY (chain_id) REFERENCES assets(chain_id) +); + +CREATE INDEX idx_health_endpoint_url ON health(endpoint_url); +CREATE INDEX idx_health_endpoint_type ON health(endpoint_type); +CREATE INDEX idx_health_chain_id ON health(chain_id); +CREATE INDEX idx_health_status ON health(status); +CREATE INDEX idx_health_last_checked ON health(last_checked); +CREATE INDEX idx_health_next_check ON health(next_check); +CREATE INDEX idx_health_deleted_at ON health(deleted_at); + diff --git a/internal/migrations/012_global_market_table.down.sql b/internal/migrations/012_global_market_table.down.sql new file mode 100644 index 0000000..04d1d4b --- /dev/null +++ b/internal/migrations/012_global_market_table.down.sql @@ -0,0 +1 @@ +DROP TABLE global_market; diff --git a/internal/migrations/012_global_market_table.up.sql b/internal/migrations/012_global_market_table.up.sql new file mode 100644 index 0000000..8216c4b --- /dev/null +++ b/internal/migrations/012_global_market_table.up.sql @@ -0,0 +1,19 @@ +-- Global market data from Alternative.me API +CREATE TABLE global_market ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + total_market_cap_usd REAL, + total_24h_volume_usd REAL, + bitcoin_percentage_of_market_cap REAL, + active_currencies INTEGER, + active_assets INTEGER, + active_markets INTEGER, + last_updated TIMESTAMP NOT NULL +); + +CREATE INDEX idx_global_market_last_updated ON global_market(last_updated); +CREATE INDEX idx_global_market_deleted_at ON global_market(deleted_at); + + diff --git a/internal/migrations/013_fear_greed_index_table.down.sql 
b/internal/migrations/013_fear_greed_index_table.down.sql new file mode 100644 index 0000000..0eedee7 --- /dev/null +++ b/internal/migrations/013_fear_greed_index_table.down.sql @@ -0,0 +1 @@ +DROP TABLE fear_greed_index; diff --git a/internal/migrations/013_fear_greed_index_table.up.sql b/internal/migrations/013_fear_greed_index_table.up.sql new file mode 100644 index 0000000..8db8eef --- /dev/null +++ b/internal/migrations/013_fear_greed_index_table.up.sql @@ -0,0 +1,15 @@ +-- Fear and Greed Index data from Alternative.me +CREATE TABLE fear_greed_index ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + value INTEGER NOT NULL, + value_classification TEXT NOT NULL, + timestamp TIMESTAMP NOT NULL, + time_until_update TEXT +); + +CREATE INDEX idx_fear_greed_index_timestamp ON fear_greed_index(timestamp); +CREATE INDEX idx_fear_greed_index_value ON fear_greed_index(value); +CREATE INDEX idx_fear_greed_index_deleted_at ON fear_greed_index(deleted_at); diff --git a/internal/migrations/014_crypto_listings_table.down.sql b/internal/migrations/014_crypto_listings_table.down.sql new file mode 100644 index 0000000..5dc3abc --- /dev/null +++ b/internal/migrations/014_crypto_listings_table.down.sql @@ -0,0 +1 @@ +DROP TABLE crypto_listings; diff --git a/internal/migrations/014_crypto_listings_table.up.sql b/internal/migrations/014_crypto_listings_table.up.sql new file mode 100644 index 0000000..f190d07 --- /dev/null +++ b/internal/migrations/014_crypto_listings_table.up.sql @@ -0,0 +1,18 @@ +-- Listings data from Alternative.me API +CREATE TABLE crypto_listings ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + api_id TEXT NOT NULL, + name TEXT NOT NULL, + symbol TEXT NOT NULL, + website_slug TEXT NOT NULL, + UNIQUE(api_id) +); + + +CREATE INDEX idx_crypto_listings_api_id ON crypto_listings(api_id); +CREATE INDEX idx_crypto_listings_symbol ON crypto_listings(symbol); +CREATE INDEX idx_crypto_listings_website_slug ON crypto_listings(website_slug); +CREATE INDEX idx_crypto_listings_deleted_at ON crypto_listings(deleted_at); diff --git a/internal/migrations/Taskfile.yml b/internal/migrations/Taskfile.yml new file mode 100644 index 0000000..979f707 --- /dev/null +++ b/internal/migrations/Taskfile.yml @@ -0,0 +1,200 @@ +# yaml-language-server: $schema=https://taskfile.dev/schema.json +version: "3" +silent: true + +tasks: + default: + cmds: + - task: migrate + + initialize: + cmds: + - task: migrate:accounts:up + - task: migrate:credentials:up + - task: migrate:profiles:up + - task: migrate:vaults:up + - task: migrate:assets:up + - task: migrate:prices:up + - task: migrate:price_conversions:up + - task: migrate:blockchains:up + - task: migrate:services:up + - task: migrate:activities:up + - task: migrate:health:up + - task: migrate:global_market:up + - task: migrate:fear_greed_index:up + - task: migrate:crypto_listings:up + + migrate: + cmds: + - task: migrate:accounts + - task: migrate:credentials + - task: migrate:profiles + - task: migrate:vaults + - task: migrate:assets + - task: migrate:prices + - task: migrate:price_conversions + - task: migrate:blockchains + - task: migrate:services + - task: migrate:activities + - task: migrate:health + - task: migrate:global_market + - task: migrate:fear_greed_index + - task: migrate:crypto_listings + + # --------------- + # Main 
Tasks + # --------------- + migrate:accounts: + cmds: + - task: migrate:accounts:down + - task: migrate:accounts:up + + migrate:accounts:up: + cmd: wrangler d1 execute USERS_DB --file 001_accounts_table.up.sql --remote -y + + migrate:accounts:down: + cmd: wrangler d1 execute USERS_DB --file 001_accounts_table.down.sql --remote -y + + migrate:credentials: + cmds: + - task: migrate:credentials:down + - task: migrate:credentials:up + + migrate:credentials:up: + cmd: wrangler d1 execute USERS_DB --file 002_credentials_table.up.sql --remote -y + + migrate:credentials:down: + cmd: wrangler d1 execute USERS_DB --file 002_credentials_table.down.sql --remote -y + + migrate:profiles: + cmds: + - task: migrate:profiles:down + - task: migrate:profiles:up + + migrate:profiles:up: + cmd: wrangler d1 execute USERS_DB --file 003_profiles_table.up.sql --remote -y + + migrate:profiles:down: + cmd: wrangler d1 execute USERS_DB --file 003_profiles_table.down.sql --remote -y + + migrate:vaults: + cmds: + - task: migrate:vaults:down + - task: migrate:vaults:up + + migrate:vaults:down: + cmd: wrangler d1 execute USERS_DB --file 004_vaults_table.down.sql --remote -y + + migrate:vaults:up: + cmd: wrangler d1 execute USERS_DB --file 004_vaults_table.up.sql --remote -y + + migrate:assets: + cmds: + - task: migrate:assets:down + - task: migrate:assets:up + + migrate:assets:up: + cmd: wrangler d1 execute NETWORK_DB --file 005_assets_table.up.sql --remote -y + + migrate:assets:down: + cmd: wrangler d1 execute NETWORK_DB --file 005_assets_table.down.sql --remote -y + + migrate:prices: + cmds: + - task: migrate:prices:down + - task: migrate:prices:up + + migrate:prices:up: + cmd: wrangler d1 execute NETWORK_DB --file 006_prices_table.up.sql --remote -y + + migrate:prices:down: + cmd: wrangler d1 execute NETWORK_DB --file 006_prices_table.down.sql --remote -y + + migrate:price_conversions: + cmds: + - task: migrate:price_conversions:down + - task: migrate:price_conversions:up + + migrate:price_conversions:up: + cmd: wrangler d1 execute NETWORK_DB --file 007_price_conversions_table.up.sql --remote -y + + migrate:price_conversions:down: + cmd: wrangler d1 execute NETWORK_DB --file 007_price_conversions_table.down.sql --remote -y + + migrate:blockchains: + cmds: + - task: migrate:blockchains:down + - task: migrate:blockchains:up + + migrate:blockchains:up: + cmd: wrangler d1 execute NETWORK_DB --file 008_blockchains_table.up.sql --remote -y + + migrate:blockchains:down: + cmd: wrangler d1 execute NETWORK_DB --file 008_blockchains_table.down.sql --remote -y + + migrate:services: + cmds: + - task: migrate:services:down + - task: migrate:services:up + + migrate:services:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 009_services_table.up.sql --remote -y + + migrate:services:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 009_services_table.down.sql --remote -y + + migrate:activities: + cmds: + - task: migrate:activities:down + - task: migrate:activities:up + + migrate:activities:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 010_activities_table.up.sql --remote -y + + migrate:activities:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 010_activities_table.down.sql --remote -y + + migrate:health: + cmds: + - task: migrate:health:down + - task: migrate:health:up + + migrate:health:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 011_health_table.up.sql --remote -y + + migrate:health:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 011_health_table.down.sql --remote -y + + migrate:global_market: + cmds: + - task: 
global_market:down + - task: global_market:up + + migrate:global_market:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 012_global_market_table.up.sql --remote -y + + migrate:global_market:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 012_global_market_table.down.sql --remote -y + + migrate:fear_greed_index: + cmds: + - task: migrate:fear_greed_index:down + - task: migrate:fear_greed_index:up + + migrate:fear_greed_index:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 013_fear_greed_index_table.up.sql --remote -y + + migrate:fear_greed_index:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 013_fear_greed_index_table.down.sql --remote -y + + migrate:crypto_listings: + cmds: + - task: migrate:crypto_listings:down + - task: migrate:crypto_listings:up + + migrate:crypto_listings:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 014_crypto_listings_table.up.sql --remote -y + + migrate:crypto_listings:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 014_crypto_listings_table.down.sql --remote -y + diff --git a/internal/migrations/node_modules/.cache/wrangler/wrangler-account.json b/internal/migrations/node_modules/.cache/wrangler/wrangler-account.json new file mode 100644 index 0000000..25faf23 --- /dev/null +++ b/internal/migrations/node_modules/.cache/wrangler/wrangler-account.json @@ -0,0 +1,6 @@ +{ + "account": { + "id": "eb37925850388bca807b7fab964c12bb", + "name": "Sonr" + } +} \ No newline at end of file diff --git a/internal/migrations/node_modules/.mf/cf.json b/internal/migrations/node_modules/.mf/cf.json new file mode 100644 index 0000000..d437f69 --- /dev/null +++ b/internal/migrations/node_modules/.mf/cf.json @@ -0,0 +1 @@ +{"clientTcpRtt":8,"requestHeaderNames":{},"httpProtocol":"HTTP/1.1","tlsCipher":"AEAD-AES256-GCM-SHA384","continent":"NA","asn":701,"clientAcceptEncoding":"br, gzip, deflate","verifiedBotCategory":"","country":"US","region":"Virginia","tlsClientCiphersSha1":"kXrN3VEKDdzz2cPKTQaKzpxVTxQ=","tlsClientAuth":{"certIssuerDNLegacy":"","certIssuerSKI":"","certSubjectDNRFC2253":"","certSubjectDNLegacy":"","certFingerprintSHA256":"","certNotBefore":"","certSKI":"","certSerial":"","certIssuerDN":"","certVerified":"NONE","certNotAfter":"","certSubjectDN":"","certPresented":"0","certRevoked":"0","certIssuerSerial":"","certIssuerDNRFC2253":"","certFingerprintSHA1":""},"tlsClientRandom":"KHkBe8nH4XNP9wnNS5nCDWBpe+Ha+8+BUuP0iev0P7Q=","tlsExportedAuthenticator":{"clientFinished":"c71857a631b6612f8bdfda376b597ddb0ccf62688fc7f50086006daba82f54c412501557ccfce73754bc550a1e09a6b9","clientHandshake":"8d0a2b64f7b6d0d1c2a77d7535feca90c9703a46c457b4951670146a8b5e2fe89357c6d8666c4e7f864e6814e7bb1d0f","serverHandshake":"429ef59250f50d719b076c2efdf97ecd5d1a50c15fdf979df5894d078793865ff44c7680213365147c44daedbc92bec6","serverFinished":"6e46d6694b01edbbc7d5daa9316565f17fb3a626713c96286d07487a7ddb7482aea03a84971fc74231d848d2f037af41"},"tlsClientHelloLength":"383","colo":"IAD","timezone":"America/New_York","longitude":"-77.53900","latitude":"39.01800","edgeRequestKeepAliveStatus":1,"requestPriority":"","postalCode":"20147","city":"Ashburn","tlsVersion":"TLSv1.3","regionCode":"VA","asOrganization":"Verizon Fios","metroCode":"511","tlsClientExtensionsSha1Le":"u4wtEMFQBY18l3BzHAvORm+KGRw=","tlsClientExtensionsSha1":"1eY97BUYYO8vDaTfHQywB1pcNdM=","botManagement":{"corporateProxy":false,"verifiedBot":false,"jsDetection":{"passed":false},"staticResource":false,"detectionIds":{},"score":99}} \ No newline at end of file diff --git a/internal/migrations/wrangler.toml 
b/internal/migrations/wrangler.toml new file mode 100644 index 0000000..b898e6a --- /dev/null +++ b/internal/migrations/wrangler.toml @@ -0,0 +1,64 @@ +# Top-level configuration +name = "motr-worker" +main = "worker.mjs" +compatibility_date = "2025-04-14" + +routes = [ + { pattern = "sonr.id", custom_domain = true }, +] + +[build] +command = "devbox run build:worker" + +[dev] +port = 6969 + +[observability] +enabled = true + +[triggers] +crons = ["0 */1 * * *"] + +[[d1_databases]] +binding = "ACTIVITY_DB" +database_name = "motr-activity" +database_id = "a7ccb4bb-c529-4f42-8029-92564a3aecb8" + +[[d1_databases]] +binding = "NETWORK_DB" +database_name = "motr-network" +database_id = "acb75499-3502-4052-9604-263a913e077a" + +[[d1_databases]] +binding = "USERS_DB" +database_name = "motr-users" +database_id = "8ed4d399-5932-419c-b92f-9c20d7a36ad2" + +[[kv_namespaces]] +binding = "SESSIONS_KV" +id = "ea5de66fcfc14b5eba170395e29432ee" + +[[kv_namespaces]] +binding = "HANDLES_KV" +id = "271d47087a8842b2aac5ee79cf7bb203" + +[[r2_buckets]] +binding = 'PROFILES' +bucket_name = 'profiles' + +[vars] +SONR_CHAIN_ID = 'sonr-testnet-1' +IPFS_GATEWAY = 'https://ipfs.sonr.land' +SONR_API_URL = 'https://api.sonr.land' +SONR_RPC_URL = 'https://rpc.sonr.land' +SONR_GRPC_URL = 'https://grpc.sonr.land' +MATRIX_SERVER = 'https://bm.chat' +MOTR_GATEWAY = 'https://sonr.id' +MOTR_VAULT = 'https://did.run' + +[durable_objects] +bindings = [{name = "VAULT", class_name = "Vault"}] + +[[migrations]] +tag = "v1" # Should be unique for each entry +new_classes = ["Vault"] # List the classes that should be created diff --git a/internal/ui/charts/area_chart.templ b/internal/ui/charts/area_chart.templ new file mode 100644 index 0000000..e981d57 --- /dev/null +++ b/internal/ui/charts/area_chart.templ @@ -0,0 +1,20 @@ +package charts + +import "fmt" + +type DateValue struct { + Date string + Value int +} + +templ AreaChart(data []DateValue) { + for _, d := range data { +
+
+ ${ d.Date } + ${ fmt.Sprintf("%d", d.Value) } +
+
+
+ } +} diff --git a/internal/ui/charts/area_chart_templ.go b/internal/ui/charts/area_chart_templ.go new file mode 100644 index 0000000..cfc2d1a --- /dev/null +++ b/internal/ui/charts/area_chart_templ.go @@ -0,0 +1,75 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package charts + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +import "fmt" + +type DateValue struct { + Date string + Value int +} + +func AreaChart(data []DateValue) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + for _, d := range data { + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "
$") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var2 string + templ_7745c5c3_Var2, templ_7745c5c3_Err = templ.JoinStringErrs(d.Date) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/area_chart.templ`, Line: 14, Col: 61} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var2)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, " $") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var3 string + templ_7745c5c3_Var3, templ_7745c5c3_Err = templ.JoinStringErrs(fmt.Sprintf("%d", d.Value)) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/area_chart.templ`, Line: 15, Col: 81} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var3)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 3, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + } + return nil + }) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/ui/charts/bar_chart.templ b/internal/ui/charts/bar_chart.templ new file mode 100644 index 0000000..f38d582 --- /dev/null +++ b/internal/ui/charts/bar_chart.templ @@ -0,0 +1,21 @@ +package charts + +import "fmt" + +type KeyValue struct { + Key string + Value int + Color string +} + +templ BarChart(data []KeyValue) { + for _, d := range data { +
+
+ ${ d.Key }
+ ${ fmt.Sprintf("%d", d.Value) }
+
+
+ } +} diff --git a/internal/ui/charts/bar_chart_templ.go b/internal/ui/charts/bar_chart_templ.go new file mode 100644 index 0000000..167d01f --- /dev/null +++ b/internal/ui/charts/bar_chart_templ.go @@ -0,0 +1,76 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package charts + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +import "fmt" + +type KeyValue struct { + Key string + Value int + Color string +} + +func BarChart(data []KeyValue) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + for _, d := range data { + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "
$") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var2 string + templ_7745c5c3_Var2, templ_7745c5c3_Err = templ.JoinStringErrs(d.Key) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/bar_chart.templ`, Line: 15, Col: 60} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var2)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, " $") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var3 string + templ_7745c5c3_Var3, templ_7745c5c3_Err = templ.JoinStringErrs(fmt.Sprintf("%d", d.Value)) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/bar_chart.templ`, Line: 16, Col: 81} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var3)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 3, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + } + return nil + }) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/ui/charts/candle_chart.templ b/internal/ui/charts/candle_chart.templ new file mode 100644 index 0000000..aab20af --- /dev/null +++ b/internal/ui/charts/candle_chart.templ @@ -0,0 +1,106 @@ +package charts + +import ( + "encoding/json" + "time" +) + +type CandleData struct { + Open float64 + Close float64 + High float64 + Low float64 + Date time.Time +} + +// D3 script handle for deduplication +var d3Handle = templ.NewOnceHandle() + +// D3 component for loading D3.js +templ D3() { + @d3Handle.Once() { + + } +} + +// CandleChart component +templ CandleChart(data []CandleData) { + @D3() +
+
+
+ +} + +// formatDataForJS converts the Go data structure to a JavaScript-compatible JSON string +func formatDataForJS(data []CandleData) string { + type jsData struct { + Date string `json:"date"` + Open float64 `json:"open"` + Close float64 `json:"close"` + High float64 `json:"high"` + Low float64 `json:"low"` + } + + jsDataArray := make([]jsData, len(data)) + for i, d := range data { + jsDataArray[i] = jsData{ + Date: d.Date.Format(time.RFC3339), + Open: d.Open, + Close: d.Close, + High: d.High, + Low: d.Low, + } + } + + jsonBytes, err := json.Marshal(jsDataArray) + if err != nil { + return "[]" + } + return string(jsonBytes) +} diff --git a/internal/ui/charts/candle_chart_templ.go b/internal/ui/charts/candle_chart_templ.go new file mode 100644 index 0000000..24a029c --- /dev/null +++ b/internal/ui/charts/candle_chart_templ.go @@ -0,0 +1,137 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package charts + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +import ( + "encoding/json" + "time" +) + +type CandleData struct { + Open float64 + Close float64 + High float64 + Low float64 + Date time.Time +} + +// D3 script handle for deduplication +var d3Handle = templ.NewOnceHandle() + +// D3 component for loading D3.js +func D3() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Var2 := templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) + templ_7745c5c3_Err = d3Handle.Once().Render(templ.WithChildren(ctx, templ_7745c5c3_Var2), templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// CandleChart component +func CandleChart(data []CandleData) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); 
templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var3 := templ.GetChildren(ctx) + if templ_7745c5c3_Var3 == nil { + templ_7745c5c3_Var3 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = D3().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// formatDataForJS converts the Go data structure to a JavaScript-compatible JSON string +func formatDataForJS(data []CandleData) string { + type jsData struct { + Date string `json:"date"` + Open float64 `json:"open"` + Close float64 `json:"close"` + High float64 `json:"high"` + Low float64 `json:"low"` + } + + jsDataArray := make([]jsData, len(data)) + for i, d := range data { + jsDataArray[i] = jsData{ + Date: d.Date.Format(time.RFC3339), + Open: d.Open, + Close: d.Close, + High: d.High, + Low: d.Low, + } + } + + jsonBytes, err := json.Marshal(jsDataArray) + if err != nil { + return "[]" + } + return string(jsonBytes) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/ui/charts/line_chart.templ b/internal/ui/charts/line_chart.templ new file mode 100644 index 0000000..d877cdd --- /dev/null +++ b/internal/ui/charts/line_chart.templ @@ -0,0 +1,15 @@ +package charts + +import "fmt" + +templ LineChart(data []DateValue) { + for _, d := range data { +
+
+ ${ d.Date }
+ ${ fmt.Sprintf("%d", d.Value) }
+
+
+ } +} diff --git a/internal/ui/charts/line_chart_templ.go b/internal/ui/charts/line_chart_templ.go new file mode 100644 index 0000000..7a6e216 --- /dev/null +++ b/internal/ui/charts/line_chart_templ.go @@ -0,0 +1,70 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package charts + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +import "fmt" + +func LineChart(data []DateValue) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + for _, d := range data { + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "
$") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var2 string + templ_7745c5c3_Var2, templ_7745c5c3_Err = templ.JoinStringErrs(d.Date) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/line_chart.templ`, Line: 9, Col: 61} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var2)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, " $") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var3 string + templ_7745c5c3_Var3, templ_7745c5c3_Err = templ.JoinStringErrs(fmt.Sprintf("%d", d.Value)) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/line_chart.templ`, Line: 10, Col: 81} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var3)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 3, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + } + return nil + }) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/ui/charts/pie_chart.templ b/internal/ui/charts/pie_chart.templ new file mode 100644 index 0000000..b3484f3 --- /dev/null +++ b/internal/ui/charts/pie_chart.templ @@ -0,0 +1,22 @@ +package charts + +import "fmt" + +type CategoryValue struct { + Category string + Value int + ColorFrom string + ColorTo string +} + +templ PieChart(data []CategoryValue) { + for _, d := range data { +
+
+ ${ d.Category }
+ ${ fmt.Sprintf("%d", d.Value) }
+
+
+ } +} diff --git a/internal/ui/charts/pie_chart_templ.go b/internal/ui/charts/pie_chart_templ.go new file mode 100644 index 0000000..92169e3 --- /dev/null +++ b/internal/ui/charts/pie_chart_templ.go @@ -0,0 +1,77 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package charts + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +import "fmt" + +type CategoryValue struct { + Category string + Value int + ColorFrom string + ColorTo string +} + +func PieChart(data []CategoryValue) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + for _, d := range data { + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "
$") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var2 string + templ_7745c5c3_Var2, templ_7745c5c3_Err = templ.JoinStringErrs(d.Category) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/pie_chart.templ`, Line: 16, Col: 65} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var2)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, " $") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var3 string + templ_7745c5c3_Var3, templ_7745c5c3_Err = templ.JoinStringErrs(fmt.Sprintf("%d", d.Value)) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/charts/pie_chart.templ`, Line: 17, Col: 81} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var3)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 3, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + } + return nil + }) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/ui/layout.templ b/internal/ui/layout.templ new file mode 100644 index 0000000..b069a88 --- /dev/null +++ b/internal/ui/layout.templ @@ -0,0 +1,113 @@ +package ui + +// Body is a component that renders the body tag +templ Body() { + + { children... } + +} + +// Head is a component that renders the head of the document +templ Head() { + + + + @ApexCharts() + @Helia() + @Dexie() + @Htmx() + @Tailwind() + @Shoelace() + @DefaultStyles() + { children... } + +} + +// HTML is a component that renders the html tag +templ HTML() { + + + { children... } + +} + +// Columns is a component that renders a responsive flex container that stacks on mobile +templ Columns() { +
+ { children... } +
+} + +// Container is a component that renders a full screen container +templ Container() { +
+
+
+
+ { children... } +
+
+
+
+} + +// Tailwind css dependencies +templ Tailwind() { + @tailwindHandle.Once() { + + } +} + +// Nav is a component that renders the navigation bar +templ Nav() { + +} + +// NavCTA is a component that renders a call to action button +templ NavCTA(href string, text string) { + { text } +} + +// NavItem is a component that renders a navigation item +templ NavItem(href string, text string) { + { text } +} + +// NavLogo is a component that renders a logo +templ NavLogo(title string) { + + { children... } + { title } + +} + +// NavLeft is a component that renders the left side of the navigation bar +templ NavLeft() { +
+ { children... } +
+} + +templ NavRight() { +
+ { children... } +
+} + +// Rows is a component that renders a responsive flex container that wraps on mobile +templ Rows() { +
+ { children... } +
+} + +templ Separator(text string) { +
+
+
+ { text } +
+
+} diff --git a/internal/ui/layout_templ.go b/internal/ui/layout_templ.go new file mode 100644 index 0000000..756f56a --- /dev/null +++ b/internal/ui/layout_templ.go @@ -0,0 +1,633 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package ui + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +// Body is a component that renders the body tag +func Body() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var1.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Head is a component that renders the head of the document +func Head() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var2 := templ.GetChildren(ctx) + if templ_7745c5c3_Var2 == nil { + templ_7745c5c3_Var2 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 3, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = ApexCharts().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = Helia().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = Dexie().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = Htmx().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = Tailwind().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + 
templ_7745c5c3_Err = Shoelace().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = DefaultStyles().Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var2.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 4, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// HTML is a component that renders the html tag +func HTML() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var3 := templ.GetChildren(ctx) + if templ_7745c5c3_Var3 == nil { + templ_7745c5c3_Var3 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 5, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var3.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 6, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Columns is a component that renders a responsive flex container that stacks on mobile +func Columns() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var4 := templ.GetChildren(ctx) + if templ_7745c5c3_Var4 == nil { + templ_7745c5c3_Var4 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 7, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var4.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 8, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Container is a component that renders a full screen container +func Container() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var5 := templ.GetChildren(ctx) + if templ_7745c5c3_Var5 == nil { + templ_7745c5c3_Var5 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 9, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var5.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 10, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Tailwind css dependencies +func Tailwind() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var6 := templ.GetChildren(ctx) + if templ_7745c5c3_Var6 == nil { + templ_7745c5c3_Var6 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Var7 := templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 11, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) + templ_7745c5c3_Err = tailwindHandle.Once().Render(templ.WithChildren(ctx, templ_7745c5c3_Var7), templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Nav is a component that renders the navigation bar +func Nav() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var8 := templ.GetChildren(ctx) + if templ_7745c5c3_Var8 == nil { + templ_7745c5c3_Var8 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 12, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// NavCTA is a component that renders a call to action button +func NavCTA(href string, text string) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := 
templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var9 := templ.GetChildren(ctx) + if templ_7745c5c3_Var9 == nil { + templ_7745c5c3_Var9 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 14, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var11 string + templ_7745c5c3_Var11, templ_7745c5c3_Err = templ.JoinStringErrs(text) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/layout.templ`, Line: 70, Col: 47} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var11)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 16, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// NavItem is a component that renders a navigation item +func NavItem(href string, text string) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var12 := templ.GetChildren(ctx) + if templ_7745c5c3_Var12 == nil { + templ_7745c5c3_Var12 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 17, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var14 string + templ_7745c5c3_Var14, templ_7745c5c3_Err = templ.JoinStringErrs(text) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/layout.templ`, Line: 75, Col: 44} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var14)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 19, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// NavLogo is a component that renders a logo +func NavLogo(title string) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = 
templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var15 := templ.GetChildren(ctx) + if templ_7745c5c3_Var15 == nil { + templ_7745c5c3_Var15 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 20, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var15.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 21, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var16 string + templ_7745c5c3_Var16, templ_7745c5c3_Err = templ.JoinStringErrs(title) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/layout.templ`, Line: 82, Col: 48} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var16)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 22, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// NavLeft is a component that renders the left side of the navigation bar +func NavLeft() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var17 := templ.GetChildren(ctx) + if templ_7745c5c3_Var17 == nil { + templ_7745c5c3_Var17 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 23, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var17.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 24, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +func NavRight() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var18 := templ.GetChildren(ctx) + if templ_7745c5c3_Var18 == nil { + templ_7745c5c3_Var18 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 25, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var18.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 26, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Rows is a component that renders a responsive flex container that wraps on mobile +func Rows() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var19 := templ.GetChildren(ctx) + if templ_7745c5c3_Var19 == nil { + templ_7745c5c3_Var19 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 27, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templ_7745c5c3_Var19.Render(ctx, templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 28, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +func Separator(text string) templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var20 := templ.GetChildren(ctx) + if templ_7745c5c3_Var20 == nil { + templ_7745c5c3_Var20 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 29, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + var templ_7745c5c3_Var21 string + templ_7745c5c3_Var21, templ_7745c5c3_Err = templ.JoinStringErrs(text) + if templ_7745c5c3_Err != nil { + return templ.Error{Err: templ_7745c5c3_Err, FileName: `internal/ui/layout.templ`, Line: 110, Col: 45} + } + _, templ_7745c5c3_Err = templ_7745c5c3_Buffer.WriteString(templ.EscapeString(templ_7745c5c3_Var21)) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 30, "
") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/ui/providers.templ b/internal/ui/providers.templ new file mode 100644 index 0000000..76dd400 --- /dev/null +++ b/internal/ui/providers.templ @@ -0,0 +1,139 @@ +package ui + +import "fmt" + +var ( + apexChartsHandle = templ.NewOnceHandle() + d3Handle = templ.NewOnceHandle() + dexieHandle = templ.NewOnceHandle() + heliaHandle = templ.NewOnceHandle() + htmxHandle = templ.NewOnceHandle() + tailwindHandle = templ.NewOnceHandle() +) + +// ApexCharts is a component that renders the ApexCharts.js library +templ ApexCharts() { + @apexChartsHandle.Once() { + + } +} + +// d3 is a component that renders the D3.js library +templ D3() { + @d3Handle.Once() { + + } +} + +// dexie is a component that renders the Dexie.js library +templ Dexie() { + @dexieHandle.Once() { + + + } +} + +// In package deps +templ Helia() { + @heliaHandle.Once() { + + + + + + } +} + +// Htmx is a component that renders the Htmx.js library +templ Htmx() { + @htmxHandle.Once() { + + + + + + + } +} + +// jsDelivrURL returns the URL of a package on jsDelivr +func jsDelivrURL(pkg string, version string, path string) string { + return fmt.Sprintf("https://cdn.jsdelivr.net/npm/%s/%s/%s", pkg, version, path) +} diff --git a/internal/ui/providers_templ.go b/internal/ui/providers_templ.go new file mode 100644 index 0000000..556da3f --- /dev/null +++ b/internal/ui/providers_templ.go @@ -0,0 +1,371 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package ui + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +import "fmt" + +var ( + apexChartsHandle = templ.NewOnceHandle() + d3Handle = templ.NewOnceHandle() + dexieHandle = templ.NewOnceHandle() + heliaHandle = templ.NewOnceHandle() + htmxHandle = templ.NewOnceHandle() + tailwindHandle = templ.NewOnceHandle() +) + +// ApexCharts is a component that renders the ApexCharts.js library +func ApexCharts() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Var2 := templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = 
templ.InitializeContext(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) + templ_7745c5c3_Err = apexChartsHandle.Once().Render(templ.WithChildren(ctx, templ_7745c5c3_Var2), templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// d3 is a component that renders the D3.js library +func D3() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var3 := templ.GetChildren(ctx) + if templ_7745c5c3_Var3 == nil { + templ_7745c5c3_Var3 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Var4 := templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) + templ_7745c5c3_Err = d3Handle.Once().Render(templ.WithChildren(ctx, templ_7745c5c3_Var4), templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// dexie is a component that renders the Dexie.js library +func Dexie() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var5 := templ.GetChildren(ctx) + if templ_7745c5c3_Var5 == nil { + templ_7745c5c3_Var5 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Var6 := templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + 
defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 3, " ") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) + templ_7745c5c3_Err = dexieHandle.Once().Render(templ.WithChildren(ctx, templ_7745c5c3_Var6), templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// In package deps +func Helia() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var9 := templ.GetChildren(ctx) + if templ_7745c5c3_Var9 == nil { + templ_7745c5c3_Var9 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Var10 := templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 6, " ") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) + templ_7745c5c3_Err = heliaHandle.Once().Render(templ.WithChildren(ctx, templ_7745c5c3_Var10), templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Htmx is a component that renders the Htmx.js library +func Htmx() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var11 := templ.GetChildren(ctx) + if templ_7745c5c3_Var11 == nil { + templ_7745c5c3_Var11 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Var12 := templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := 
templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 7, " ") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) + templ_7745c5c3_Err = htmxHandle.Once().Render(templ.WithChildren(ctx, templ_7745c5c3_Var12), templ_7745c5c3_Buffer) + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// jsDelivrURL returns the URL of a package on jsDelivr +func jsDelivrURL(pkg string, version string, path string) string { + return fmt.Sprintf("https://cdn.jsdelivr.net/npm/%s/%s/%s", pkg, version, path) +} + +var _ = templruntime.GeneratedTemplate diff --git a/internal/ui/styles.templ b/internal/ui/styles.templ new file mode 100644 index 0000000..4a27eb0 --- /dev/null +++ b/internal/ui/styles.templ @@ -0,0 +1,54 @@ +package ui + +templ DefaultStyles() { + +} + +// Shoelace dependencies +templ Shoelace() { + + + +} diff --git a/internal/ui/styles_templ.go b/internal/ui/styles_templ.go new file mode 100644 index 0000000..74ef1c9 --- /dev/null +++ b/internal/ui/styles_templ.go @@ -0,0 +1,70 @@ +// Code generated by templ - DO NOT EDIT. + +// templ: version: v0.3.857 +package ui + +//lint:file-ignore SA4006 This context is only used if a nested component is present. + +import "github.com/a-h/templ" +import templruntime "github.com/a-h/templ/runtime" + +func DefaultStyles() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var1 := templ.GetChildren(ctx) + if templ_7745c5c3_Var1 == nil { + templ_7745c5c3_Var1 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +// Shoelace dependencies +func Shoelace() templ.Component { + return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) { + templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context + if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil { + return templ_7745c5c3_CtxErr + } + templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W) + if !templ_7745c5c3_IsBuffer { + defer func() { + templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer) + if templ_7745c5c3_Err == nil { + templ_7745c5c3_Err = templ_7745c5c3_BufErr + } + }() + } + ctx = templ.InitializeContext(ctx) + templ_7745c5c3_Var2 := 
templ.GetChildren(ctx) + if templ_7745c5c3_Var2 == nil { + templ_7745c5c3_Var2 = templ.NopComponent + } + ctx = templ.ClearChildren(ctx) + templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 2, "") + if templ_7745c5c3_Err != nil { + return templ_7745c5c3_Err + } + return nil + }) +} + +var _ = templruntime.GeneratedTemplate diff --git a/main.go b/main.go new file mode 100644 index 0000000..cd0a8a0 --- /dev/null +++ b/main.go @@ -0,0 +1,72 @@ +//go:build js && wasm +// +build js,wasm + +package main + +import ( + "context" + "fmt" + "net/http" + + "github.com/labstack/echo/v4" + "github.com/sonr-io/motr/config" + "github.com/sonr-io/motr/middleware/database" + "github.com/sonr-io/motr/middleware/kvstore" + "github.com/sonr-io/motr/middleware/session" + "github.com/sonr-io/motr/middleware/webauthn" + "github.com/syumai/workers" + "github.com/syumai/workers/cloudflare/cron" + + _ "github.com/syumai/workers/cloudflare/d1" +) + +// ╭──────────────────────────────────────────────────╮ +// │ Initialization │ +// ╰──────────────────────────────────────────────────╯ + +// Setup the HTTP handler +func loadHandler() http.Handler { + e := echo.New() + e.Use( + session.Middleware(), + database.Middleware(), + kvstore.Middleware(), + webauthn.Middleware(), + ) + config.RegisterViews(e) + config.RegisterPartials(e) + return e +} + +// Setup the cron task +func loadTask() cron.Task { + return func(ctx context.Context) error { + e, err := cron.NewEvent(ctx) + if err != nil { + return err + } + fmt.Println(e.ScheduledTime.Unix()) + return nil + } +} + +// ╭─────────────────────────────────────────────────╮ +// │ Main Function │ +// ╰─────────────────────────────────────────────────╯ + +func main() { + // Setup CRON jobs + e := loadHandler() + t := loadTask() + + // Configure Worker + cron.ScheduleTaskNonBlock(t) + workers.ServeNonBlock(e) + workers.Ready() + + // Block until handler/task is done + select { + case <-workers.Done(): + case <-cron.Done(): + } +} diff --git a/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite b/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..0b58f0dca7d6ce95907cb0b322dc4802f04688c0 GIT binary patch literal 4096 zcmWFz^vNtqRY=P(%1ta$FlG>7U}9o$P*7lCU|@t|AVoG{WYDXN;00+HAlr;ljiVtj n8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*O6ovo**AfQj literal 0 HcmV?d00001 diff --git a/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-shm b/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/9d05e031a8e5a9c48905f4b85a8de4ad246d43f6c9251fb531fb640a87ba6029.sqlite-shm new file mode 100644 index 0000000000000000000000000000000000000000..4b157f3d8cc051cdcd69b0170f0b98f69cba9ba3 GIT binary patch literal 32768 zcmeI5yHZnO6on50f`9@d5D6+b<$jlowLP>)*-$u|E@+y{A`4Kbz-` zN1oSL-DXRFuOH<8<;t%+zWy`ODY`_rcrJQGuXrK)M8EJ^KMshO;*}T_Lt(5KS>D-ii0(gJ^xm&!@z+m=Uw$;d^uX|D*UM=EZ_&z3=by=NHA2 zSQab7w%3Fm6AWMg0~o*n1~7mD3}65Q7{CAqFn|FJU;qOczyJm?fB_6(00S7n00uCC z0SsUO0~o*n1~7mD3}65Q7{CAqFn|FJU;qOczyJm?fB_6(00S7nKzRnfly}Q*x?}em zkcfc}bx4d?b82VUjvQ~}nXhfUc3GEpqX&r?aNkN9`BuhlK7qS*mko&+aMMs3`Ci6t zK7qS*>k^3=aK}~|aU&Ut7;t|Xi5PIZ9LZ(CZF?kQz`cqjmw~BV<22sxP9g?w02Wd;O~rIFjIVk=hCC~>NcK^4DY4akeh<*~z|;PrE(e_g1^n$o@;G`ucwwNhF?%^cDNR-v3nJy52AKW_n)l 
zxzw|fBw_;r5C8!X009sH0T2Lz$i)ON?ApaG9x~OlB~8iCC=+rneKeg*`}ccBvT`~n zi@EfXaar`|h^d6=Z0EEEaV(RQkI7l_M0RWf>zQ@t)QqTOG-GM7|R@$Pl;^`i<23VRY-YlNDp+y zg@Z}1(JaU?RkLBRI^FGs(1HO0G)X$o^Fo>oV_kFW)e9A^uBe4VT{B!EUG0Sg$_Hcx za-=iyxNv8ZTWFRTlEt@|<&+Faand9;78mYHat}4PpD(I=Ef%Esa$&EgqWGkiJ9r~aq*};nw}odi6hh5tenXyZV{^cjauQoSN3$+ zlT4enhi`Y6b%drDG$UWvYo=bU*rTDT3_BfQum)3v7L5h8u2tQ#W~yX(D$N_YPaZj* zdeEp=6mqufg%rz{B-#zLudBi^tI$C%GS=^TYqNgmyZ(Zn9Usv*Z~gJN`m99iO0f2@fdB}A00@8p2!H?x zfB*=900@8p2&_y3mXE;o3+(*LPo9416Mys3gp~-tfJFft2!H?xfB*=900@8p2!H?x zfB*=rWCHd(0POvOv&TR7^yAg<|9kiaR`Mao*gyaTKmY_l00ck)1V8`;KmY_lz$Fl~ z_LyJbGY7t*|Hk*;_&NLnE+6cG00@8p2!H?xfB*=900@8p2!Oy!CJ?s<$NU1XeR@xK z^doOxgkNAKA99Qh1V8`;KmY_l00ck)1V8`;KmY_>0=%`y`~u>0mlt0A52f|+3%Go+ z0|Fob0w4eaAOHd&00JNY0w4eaE17`3E&=lkRQ~$)7oL1`QiNY%B_DE(4Fo^{1V8`; zKmY_l00ck)1V8`;Tms$J9`g%aed+Qm-@E0zSKt?L`CtbGKmY_l00ck)1V8`;KmY_l z00dSt0ow(@`~u%ORQ$qcUyD5pzrad9~X(Q#=@18;M3E_lhDZ+Cly{l0UYsB|Dw$3;DF$ z^L=l%+Y#A+=~Q3;Pa_**+arC&{;&5x)wizqOTC$%*LyDYY)m}WHJDKOFY}}E@5krk z-LZd0ieUo*5CDM>0)g5FUbttFTkO*-1#Mn0%qwc%)aP_l*9@hgl{8Z;D5~itZXC(V z>6|Q%Wsb_H#I_a0$&Bb#N_j0wbL)9wgh<@fO5)j4HGfViYWi%^Cv(G6GW}ITQvF#{ zc^xktCUU!5$u;$|W|(TZ=Jk2~QbOJGA&G9flwTVc?i%DCY9)m(F>leHA~6zDul#&8Pes$J-jeD$X%69^=!$9%ahUT$jGISjLSZ?7E=k4 zY33<*{QK>llhDkCh~51 z)P%Cso6&&U!*5~p3Yw9x>orraR($q^VYib))`%%Wi^c+4*Q#z=GgWeZP-))Cee%fh z)PqK~qL8y)FQizuBvHfH-c+|Tr=flZY$6*_&?1s^%rsJN9u+#B89O;GyQzCcrsWA~ zs3R_nB-&w zfB*=900@8p2!H?xfB*=9z=|bc#S2(-4)$6jWbp#~zxnDPy!vGIwB;8dK?2x700ck) z1V8`;KmY_l00ck)1V8`;RyG0K1;G3Q-%E^M`q0$t{}b~LuIz)3(SZO6fB*=900@8p z2!H?xfB*=9fJ0!7^~wAKr{`;Lh#fm(@C!gcKmY_l00ck)1V8`;KmY_l00cl_RS@X6 zM#=mFKl_&px8MHp-#rGuz^XX*7$gXQ00@8p2!H?xfB*=900@8p2-vd-Fu#EIOD{Yw z{l{y62EPCl0|Y<-1V8`;KmY_l00ck)1V8`;Rt14|)+m`@pnByGGjD%-{|Nj7tK!&W zkRSj8AOHd&00JNY0w4eaAOHd&u--zz`~rVj)s@C!gOKmY_l00ck)1V8`; zKmY_l00cl_RS?)-sgJn?|HN5O3${$GhI?b=bz^@@weg+$JfQ4j_r;<(fLkv zC-(w(hU<=eE;6%~7Y+?@i!r@Y(B}2Ryi(NEl37$rs$nYmqLx3W6=>2eBUw3}lf|*j zQTdeErl>fX5oz(1zXfS(ATA6IaOJQV!&J?NL2GPnCx906HBA$w(_7-g!2zxjZZuz1 z^@^f9{T^u7EGy<~m?cTNzzc^7m+o-eTBT5{>J?Kl7it>q_m*}70%d(t0_oDs<~E1G zE!L=)s3c)oNu+2iK_pqad~PEz3=D9O4V&uOlGAtE?k1<*T>8kk>>NKal@Q6M7sQ-= zDkq-Ej!mSqr^Vy)>AiMFzOJdJR#1pu=f)=Fsa$&EgqWGkiJ9r~aq*};nw}odi6hh5 ztenXyZV_6!Q7gRn$_1^Yg(cHw{lsXuS%*UT*1>C4fE|B3kJfRN&xR0et*#lhYQ@kL zQ!i^u*&t^|PCh1QX+6LCw;)6dRkV5MEc1%coEcRwHR_s@uQn4hn-F~@_EfL zuCuh;adHq;-%z!By;@hwq#<>d;-RgNPG;q?V;Sq%iz$Xk60`ECOb+gdJSD1zp_zu4 zmyq^siVOD)a1s0bG0vBW8C2AUNlqPNXPA*0rs}u3wt*M!8RQoG+_O#1oBEt?>YAZ= zhucfs*mNRzdAQ=6KY3r8~YU!O_J|xjim-1`l!d-*hL#;+h726ZFZYfdDSrFt& z6Q$XHUbvf#iEAaroVP8dCuZ$ZVw}?9@z6G;*)>ZGDyl}&6V$(qAggq^(~=~ zYTiC4$@RsPwPq<1C~4^_fcl zNgXWco_wA~cg;2xQCBZ0rRuD&HQVs|z6T7h$0}%YsTvw7y&xr|gS~O#*kC(z>nV#e z?CtkmOz^_4UEJa!Tj!Mgj4~nT(nr&|w12-Rpm+WpNAs-Pgmc;g!_a)IU=^5COASpt zGCq04zIzZwsn|{Kdy@WJPeZHE>Et@*v*0W#(R6~b9A9DU^kT_=WB))Gxlc@T-hCn~ zq)_EyU%T52@$4TfoN}z+d7c;2WEksQ>&Hqc)e9A^uBe3qx!{ruBpqB=dm(}Hfx!fF zq%-ljaA%TRXqFg~#kZH`lnh95(j+xTt{O?-RfF}N{2-ur)+{03UH~f^kiZh9=}z){ zAnE%(z*j||nyI??kmAUF^2qVjgGRNYkh5Jcq*%5j zQN!2XRJSsxp?(HzA{$W9B8F~4snF@n*vVXwknXH}ocRSl^?!C8cx~|iy=rA}{qID`KWrcX0w4eaAOHd&00JNY z0w4eaAOHd@jzDL$Bf>?=ur^skV}60jvw!)|6Tk9}A6R|?5+r~P1V8`;KmY_l00ck) z1V8`;KmY_lU}Y27Yz>e31-|;LfA{bki}4KR7g*T`9isyQ5C8!X009sH0T2KI5C8!X z00D=9V0|*bfccG2?);<2Z~F=S0?-c-009sH0T2KI5C8!X009sH0T5Uf1gv=jUBAF{ zU0?i@Jx71<0Q>^0;@D%5AOHd&00JNY0w4eaAOHd&00JN|U?E`f0#E$b5C7HAUO4iX z@C!gOKmY_l00ck)1V8`;KmY_l00cl_RS?)}jgt8VE~sN`ANtdibMOnSieryKf&d7B z00@8p2!H?xfB*=900@AM ztN;1efBv@%pSZ1$r1kO7NBV00uk}CEH_-cB@BKY*_FU=NmUyO1O6dIad?x-@{Nebz 
z*weAy(I-0JiSFcH;LdQ}kg`oki92R4ks@X88fUWHW&_ceZX@YcmOI$cOz%{~+=8LLcQFN!@1MQk+#e5C3 zBuN)|;V|LS9d28z6lztyVk+iBO{4wZ(oR62tWQcHU7Fe4<`B5W8ub#DBrGe56m2Dl zBukgiZRCZ40q(J3Q$1U9`cB*34hn-F~@_EfLuCuh;adHq;-%z!By;@hw zq#<>d;-RgNPG;q?V;Sq%iz$Xk60`ECOb+gdJSD1zp_zu4myjZn=u7tua1s0bG0vBC zQ&ZH2NlqPN$(R`%rs}u3wt*M!8RQoG+_O#1oBEt?>YAZ=hucfs*mNRzdAQ=6K zY3r8~YU!O_J|xjim-1`L^<&U?{qR~=sA78w>y{GboCQIiG*O!E=Y_k;n7CFf&3W5W zdScctCB`Wo9uI9pnq9NBprUFNJwg4;2(n5y3$jwBTHg}-sOIf+l3ZUrS!<`wm|oV(RiCN!pVYyE?#bs_bk}TC5q0%~ zQmW4STC)wW?|Z=DdaQybm#U$W(hE{TI@lW*jt#amx1O>n!`^=1#RM&Hqc)e9A^uBe3qx!{ruBpqB=dm(}Hfx!fFq%-ljaA%TRXqFg~#kZH` zlnh95(j+xTt{O?-RfF}N{N$i_)+{03UH~f^kiZh9=}z){AnE%(z*j||nyI??kmAUF^2qVjgGRNYkh5Jcq*%5jQN!2XRJSsxp?(HzA{$W9 zB8F~4snF@n*vVXW)(Wf&;MKfeu4EP zL%;h+*UtPF;sq=!*gyaTKmY_l00ck)1V8`;KmY_lV8s*IPDZ$$#S8rE@o#m0=4-vr zTJZw?U8zX_ulMg?^U9i!^}W$o>xm}5ohT=w-JkEycD>#87|F&40w4eaAOHd&unGw% z+gis3dJ!%(vgX#N2WT`s^w5+h`X8Ah@X!jg^q%@Ugq&x-2s~sFT!Dx?FT&KHEbZA8 z7p^-L3{i@r3lC9>c6g=xgqc3j{fCTFMBt$vV(G>#dBH&lJk*So6Lxmv0uRZp3}@ZW zFuy=?B>m)uk>2xDC&x(utvFMymr1 zFZv4ou5KQ2ImhLk5}%}a>nZ4Z*K*l1h@gBhk{*76@M_|7x9<+!bsP_oS&+()c z7SLn;FlT;&+R>fAH2>c}_hT!A>whOg{$T?F5C8!X009sH0T2KI5C8!X009tKaRfS} z9TCo&U%+~g!1W9K{Lq_2pZ((DYkgf}WJ4^m%zFg!KK`xPRZRGjI(_J_33EFzUtVc3v1G;s@J^w`YlT z9*zcD*|AIq{1uzK;LnvlIi2K%-MhIfx;-gUKuq9^7?>U@m~|7*25MGK?>Ohx8ky~_ zKqjqBkJCreH2c6EMP;s9YLFQ`gV|?wGf!r#RBBp1uT@MkBc@WFVOm34^R;A$7?7zt z8}+(Y$&)!N*;Hs$kdsa4$ZVNKbum=Ue+*m9C6|!yzBMjP>~1%GQQ+_?!-21Zk+K%gO1UG00@8p2!H?xfB*=900@8p2!Mb?V2Aa| z;sw6*%%43v`a8J;@C!gcKmY_l00ck)1V8`;KmY_l00cl_RS>Y&7PyW11(erc-Sm$> z{(C=wUtm=ndkhi;KmY_l00ck)1V8`;KmY_l00iuH1({#qx4-<+?caOiQ~z#>bqeDI zS;~janz!)Xu2UGtI)w)ZxQ08In6sEOTWyej9cY?4EL6-l#SyD1VVy#(Q%I-Fan`(U zvFf2ak&d-otWs_@T^XB1D>pVFPvz1RC&bKTPRvY?J9Dm8w0Tpp3wbk|QL!_sUTW0I zBx==0#blGDxwXZk@@RT`JSTqG>0~3H*9_x2OS>Ka1+FCgt|q6#bU~Od=z}y}5Y{Qg zI)w#hbg6|cYvroFrXu}%wp7iZQ;M2CTXd~6&2iTb_bj?=wrPpFdO;~wXML^NOSt>K zk7Aud&D08tYO;0p5=}R8EGIPeEKBy!I*WA*u}-1Are!oRp)XUFoAFK;>lEIU>l8kh z@BGZCPBboJokE}I1G^vq0w4eaAOHd&00JNY0w4eaAh6;H*y|Kp&-nxE6bj!j{n&i& zukWzdDcr)nc%ADMc6_w+mFUaSW26GM_np9E*DhW-zK>hn<;Ek-Xlh-Vt!u=UZKwHs z)#0pElY={4!dKafSO{nP9o`E&c_B@DvaWqk=G0O{3%9X*`A*pNTFSR`q{Y~6WQ8{( z+gsr+EJbl2k%V(@UtYdjw`CvPTIPN7-c(sa3Wzy#JK^o2JD z(y>mV8$~xVoz2RboZ^P*(LfDn9a<8G(Bk^HT;+mRa$*>~%CuScC^YJ&R##m&-(09^ ztqQOxRXdNii6})e5H!OONn}~tQJEav5qV0aE6%3eyo7|nLj)dvkOB|MkJz2o4_Wpe zLH+nskN?}Rt{uTTh41^%dvpK+ zz8@EK00ck)1V8`;KmY_l00ck)1V8`;ZUlkdmJsF_SlIBymF>?w`vv#~ZUlPB009sH z0T2KI5C8!X009sH0T2Lz_nm;X=3t8X1^T~jY+duOH~c320`L2AK?gtp1V8`;KmY_l z00ck)1V8`;K;T9Y;4C4`FOW6<%TK??M}9PQa$Gkxai&@?lSxnyM%G56(a61`NQ(00 zZ;1S{&6>B+TGxPWc6;ngn@>Kn|I(=#$>jR4Sw*(@U+Wv_eXjTZo;Q20^lVE!(Bw1*Pkp+4a9|^0j?YtL+4$k0=Bjj;7ra+CheyMQ#3(3y(KOj9N-$^M%e_V zwDdr`W?3;`pIMTm3%qccaOn=W9hh8}3b;w85x#tGBQFdLaE}e!Gy2oM)BiU)h;r#8 z|r)i(*Uy^OX})(k_Pr4uL9^6zD4 z;60n-!aW1+7BHY{rwr52&6q|Q(+Gc%rV+07@xncW++v?={nfmw&*^p_)qK86R_svp zf|t0a=^EqZDP9HZO5dDl$c{~9OMXA_LZatI!TTiy2ylos>lDtZDk?6!#s)nJOyqOpM1wW?dzOx3-I6i4opM~nG{S4SdHlUzIBsaco{)w* z;=)L>ozZ0Kp&Vd)okP*;$7x=3~&u3pc-1ASZ^$58BcOv8; zHV^;-5C8!X009sH0T2KI5C8!X0D%=ppflPL;jBmYNo#1#FYv>!-2YEkeu0hA z7b1PN{@41SS@v1}3xQ|(_ip8tTZrdwz~{Mh9<)`odFK&5P1x4tSZgY1IoD`n%Bw9Q zUO@N9IJkbO)gCW^-~!PNK4|x(iYbflK~H;+D0yT+pV3>-la#W-o~}@*ra!jETOK*& zG4)PcuL!L^qw1wbT~q7_nXF~Eb~s$XXyl16-nh=vZpU56FqQca8C-yP0mKU+UI6g| z9Pws0SbB)**}C!*b-@6L9;-St>kD&EJx6}uWs zbbPe)mFUaSW88~tu5b@=A6e7e_hetJ_mb5XskX|u!>PD%a!WhW>%GwZkT#y z)=& z6%_K;T5fDYp30>sPPm8Ibr6kAXR~r9r?^F^8ad;CZOhx8j0R6 zkY6}y_AAiNK5$1-nX8r>#Az1HKC7GgD!I_tw0d5vnB)x?r8>j3hP38u$qtbw-myl# 
zu2u577IMBh+2kZ4;fBn6M!V=_QC$q>`Hx`>s0r!rTjRpS?sn%!;P5HKfv5|6V~$2_65xmnh)@L&+>UVdFkR)Q^i763L4jnrabpo#ED- zc%CNtlcjg{iWs?%y@rVo;?I}nKSXY&NngBM z>5x3dSSX*ZmO`ofmhA;i4B&#)@{GJU--n*=1TCJ62nj<}O;dWlwvxj?h zmwn?A5)k?-Ir8Y8b13s>ym5B$w0KXV-OCuxp=?pZuS%_6tZvTcVIFF+<0zy<;!00JNY0w4eaAOHd&00JNY0wA!m34F*J9(#}Avww5vr5*qH zovT=nU}Yb4j1B}q00ck)1V8`;KmY_l00ck)1RMf;tWV|_c)5Secfa%GSN|A(0q6$^ zfB*=900@8p2!H?xfB*=900^uK0(-4dGQYq(UnoD-yXN5+;TKpH#~yIWH(&e)`~s`u*kh0&00JNY0w4eaAOHd&00JNY0w6F%5DYQDz_wfd m_v3{>|Jr`|1)vxp00JNY0w4eaAOHd&00JNY0wA#R3H*PQE_3++ literal 0 HcmV?d00001 diff --git a/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite b/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..0b58f0dca7d6ce95907cb0b322dc4802f04688c0 GIT binary patch literal 4096 zcmWFz^vNtqRY=P(%1ta$FlG>7U}9o$P*7lCU|@t|AVoG{WYDXN;00+HAlr;ljiVtj n8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*O6ovo**AfQj literal 0 HcmV?d00001 diff --git a/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-shm b/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-shm new file mode 100644 index 0000000000000000000000000000000000000000..3721d2ecdb834451d55e84f6355a98bc936d79ff GIT binary patch literal 32768 zcmeI*IZgvn5QJgdYzBj|%{J^?3K0?afP{=2ki!)aCm`hndJ^5>3!zoSm5lj@W@tatot zU(x$jbxrkeud5sCrn;qic7gx`2q1s}0tg_000IagfB*srAbc0vRV1+l4q}bd;56PSN@59kYb_}e=xTfJXuR>Zy-eMkg>oC3Z9j3AIxz<1vf1ab=awnu_M iP66L@N)X5?;2VAk0yzchd$n^;&(ygEe49Hd5%>hfhAAlk literal 0 HcmV?d00001 diff --git a/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-wal b/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/acea1af20ead3d6ec34949d9ac31e6e7a8b420c5faa6382a5016d4645bcd0518.sqlite-wal new file mode 100644 index 0000000000000000000000000000000000000000..eea540a7b95cbb6e1a17f3865d835c461550c85e GIT binary patch literal 144232 zcmeI5eQYG>UB~zR#oqPaYc`wonhD{kC+EiZlH*d2BwqW*m*v*$yY-r!3(7Fw9p7!d zZ`m0;_BEuq7hO0+dqk9=76=7JiiDzyq7wBaIIgHcg(wOy6;dRi1wpH-1qG3+M*<4Z zGtWG`&&=$)zVJ`H_Z@k6c7D(A`906`d0y7@Xy^Ak_Q}Ynd>{Rn+kL(rK6)Gp{LUwC z|K>NwPCpZ#K05IyK_c>t^8Ba%eeQoAcqKb`CSNsmX{p(+Yo>IB zdF&%h&LMt5&Xzsly4S~m@4lzcCQ~lq-~!lS)xKUChm7iwn{zWnrwNu&UKobZIg_J86q#H4;f5%lLn>+NkJj)ykTx z8HR2eYDKT6?XyU^IEl`Rx2z= zq@Q`NCK%@;c`g)5k7xXKw`pU&zSyiW!{L60nUY|Z3FM1;e>9!R_%9h&e=sZTJ>cjA zCbQ{RD^f{0Tk=vbw{?wiluEf7Wv-N+IV}}tOHyGzpO;Q3)7klaNt&837L`It?WnLW zTWM9^f97r4`t5|MU8|U=$a+8c4D>o}^zK}aJuZ;2{$0WNmhvWmn zNc!Q7-)D^x_c0$Wf%#FXiN0Kd1V_xYXu0_FPkj}7nm zqn}PvA=^rzpKyQx2!H?xfB*=900@8p2!H?xfB*;#CIPxf(AgJQ%6$L3FW#6e4pAZY z1t=wm19W~b@YVmST>a%YgMWd2fes&h009sH0T2KI5C8!X z009sH0T2LzK_(EPjpM%;_`Uyr@y*qp_g1hkFvwkwwt)Z$fB*=900@8p2!H?xfB*=9 zK!-q-e&YKApUvL*%Wqr`ejfV*9X|K~0w4eaAOHd&00JNY0w4eaAOHe`On}BEi1B@a zAAa?VSJqQM{49+};7`5lBY!wR00ck)1V8`;KmY_l00ck)1V8`;29H265VT?y`f1bn zzCf%Ho%_!JW;1hV@+2stwA5_ZHB&l5LK6f6z6nVpsx*GV2zl{?{9fhy4PP=g?MuFqdM))r@Bz+*`*WJamwZ<|tpXg_vst9K3rMxiY3V&|bnuWQpR@mimKa*TdFv~^q zGmplsNadH~R-|%fiM_{ddn{T!lWoUH<-D!E0)< z>biRP{_`&7y4GISO|`7GTnXB$R;_7^HC;7UTCLi;V9acqTFoo2YinL%OK+F;1}T69 zJ3ptZht3OjTxq0o{S%LgEwsj1u251=Dn-_2arAa0y?0{FGo|U-qLMpVkWMKJ(kQ1Y zlQO3j5_o<}nUgG`F+78e^QVisnQU=^)|fScDy(X?6^~^AjBAsJ4g>G(nA?%vKc0L|!o?S-zndfSPaW0bQLXq@%#$WF&HMVIM 
zyq;Zz`x$0Rf>}G{{l&cBS`{uCR(~)n>^%CS48%aqyTXGHmie9eP zwVKfxU4>bqGM~>&Q;#T9r^xq=o&L~+(m|PxH{MOXN+M~EhFg8&GC00@8p2!H?xfB*=900@Ay40rDdO93TJ!AOHd&00JNY0w4eaAOHd&00M(ez;ZFje=o3DZ#+D4?>A4w z@8Do}I$8$;AOHd&00JNY0w4eaAOHd&00K6FVS48K0w@3aYft{%m0!AqeF2OQ5C8!X z009sH0T2KI5C8!X009uVB?u&FEBU^_+mHOC`lX+_+r+-WEz$RABnW^22!H?xfB*=9 z00@8p2!H?xSgr*4zQFe*!6)ynJ@^jx1u!r`00ck)1V8`;KmY_l00ck)1VG@HAV6ad zj_`c}-+%o4!(Xqb*03*dOY}V&2?8Jh0w4eaAOHd&00JNY0w4eaeu{wa3v9giz#kqT zj_#W~ldqb(wA5_ZHB&l5LK6f6z6nVpsv+{)Ltgx#`wCRX56d3E=yMMF?tA)dGIhhZ zJG9T2d?WQ*>V@Qmnq^iXF0CgtTi5RpOa@*M_Ic z7h5CI^!T{{*;F^Cxmc73N6Hb?*ps{+!y9uLy`l>{x~-Rjm05@--i>}P2)mKb1>w!d z5$6?k#1XfIRvd9hXk)HBCOMiwf{kZ)MAGx)exEh+j3;X3%Usn~OcE%$qOYkepD-iU z!yeJjZ0hDru+Zh=xZ0s6U2()&JA^pmOfb@Wtc#0CXNVi()L%+O)5kOZjj(fu8LG&g zm67aMhNvo(B8ugi;Yj*G#;?1Jp@HL>`9weSR&05`gc+msQeGHxg+I5surSy5VqgsS zGs)G2+H#Tn%%d?YQu*b0cdi@EeB8EI%1-7Lwxk-{aVqn+_MNHfict~fpw)VH(JTwH z?hJM*cW1FHL0i?THEpq`tHw&JRa+N~nN3rxdBt^Y%`0r_?Xuo5RT}L4oUR@^FW7OV z9ln2)#A9L$tudA>l$4W7ku4D7=#@&j8D*}NojL6a;66QDRB|T^(kW#@8s$_S>y9O~ z)*TWwJ-%5SbbAD|MZg$(DjH4i-|yczZjC~j>sl411+v%=m3jy_;009sH0T2KI5C8!X009sH0T38C0--?A=MRu(rD)UmzQEhZetgfBFHIb`oDS?5 zxJIKvAOHd&00JNY0w4eaAOHd&00JNY0!{+f-9O(K`1==s^%vgU@s<7XFW`iPD-Zwy z5C8!X009sH0T2KI5C8!X7*GN`X}kEoK>FI1+H=0Aj$vP5Kzkd_0s#;J0T2KI5C8!X z009sH0T2KICxKn`lJ5(A_%|QEp?u|A&tYG{2?$urDy6y^Usp00@8p2!H?xfB*=900@8p2!Mc-0F61ghwlsA z`OX8s_T_s%vJd+LPDr={0T2KI5C8!X009sH0T2KI5CDMzCE%y+;`;)>@$vHX-#q`& zzs{Y>laR;KQnOvxOzDVkhc6KDO-K?^4U^YC^5O^GR|xS7a<=RV*S$W5efK?mmKyS> z-lgfcr`}CoNZysWmY9fNP3(?87Ei@qicN=Kj|HRG!zV&7hxP@(9sG7LQ;>lG2)sW6 zNB2h3xqJFWIbW&i*d0yJjQcm_PLN#|BU+4z-i`L_N!#NIzw1#E zLJiYsyqriiiD&Lz8=fj(Y>h_$Epgf|~YoLAHlN8A!xal{>=jk)fao z(avn@=1j2A<>I*7p(b5%#92Fp$;|{Ky~nyCD0YUpAx{0JL^ORok zYXaN(c&vGL8R=)9s|m)rNS+Ht(&HI_y|dKVrdjZMb`kDpm?;Tn?U45u^L}epxMW!U z!K|?NfTItXY&)(B4Xv*CS`lm{CFN|%HT)}jxmwq1MrU*tW{JvtJ}*r@qD-A4-!pdl zLk~&^Wj5Y;H}xusq%|6PtyhwTRW;XJJ*!x5R>@jWUT&(@ij~{(CC`f-lkCnUQ!%*N zRAhaq9%kpzfxs4&4>9E+yN^hZkaLWF=KBIa@%ZnLe(tvCRT`(zpL)kf{&0W*2!H?x zfB*=900@8p2!H?xfB*;#9Dz_E==0P4fxWb8d|%)Pv*ADZ=dswmbYFn{NB{>2fB*=9 z00@8p2!H?xfB*=900@AR4-hzLD!R~al4g^2|1V8`;KmY_l z00ck)1V8`;YyxR|=KBJBkN)_0{e2n-$p_!rRI zMzz^6;9o!&9}I_o0sg2Vd(zK(cvg52xchXnAnZPwEC_G@IbZfPbhYAm&X+yCJ4&RE zM-REK*S|pOhVK&m3$!YlsaF_0_!oeGfufSlvkBCl!7euf-C68PfPVq_7pUoEIZ-te z{snZpuo4nId{;;(l@ z)c>b{0n4udx!1X!-tX{zf%o2f{?*HG{d$2uN011GeW^c9T^RY#k%i=6MP7+~G7=A8 z3`awsAPG1?00ah`fO$tWoxR7uv9sd?NANxk!eE;T!mzzetX$a-tv9MbTy&kLV8a{3` z8>&TplvROh^6Js*6CZiawyD;ei&f$>?@6sj?rpZ-_@2345QI|nHB)cUjn?;DkjKC# zWwpzP58iZ3h}S=o%Vo7oyP_NAcC}^FCg=@ebcqSe%{~c_YeGWTXtP~CN4~Sp=?&c= zu*@oTkj7mBvVqc7=JRVSJud3{{4u_k*> zxU{@x$g1+AY=|me1GRF6bVa2_ZhFXFDD@u4yut}1UsT}+&b3?RAC=Fc!!y#_Y$2~( zjc4!j*VkILW?OIfsWeaaX4Dp&EB%^krSUApEBp#uA-znjG?{pyg_CHV{*S}9&;?h! 
zqfS@79~dXm9}JJdPLX4E8M_S_c9k00JNY0w4eaAOHd& z00JNY0ycp==$Y>eTs?5#AI}wz{TTKIFg`#41V8`;KmY_l00ck)1V8`;K;V`ja3^gg z-xpB6@K52TE3fRpzQ8Tf_h=*tfB*=900@8p2!H?xfB*=900?}DBH;T1KlxLy{@eGe zZ~rXz1u!r`00ck)1V8`;KmY_l00ck)1VG@HAh3_Lav$Fp7{2o5`A>iBJ0G;spK=STt|e literal 0 HcmV?d00001 diff --git a/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite b/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..0b58f0dca7d6ce95907cb0b322dc4802f04688c0 GIT binary patch literal 4096 zcmWFz^vNtqRY=P(%1ta$FlG>7U}9o$P*7lCU|@t|AVoG{WYDXN;00+HAlr;ljiVtj n8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*O6ovo**AfQj literal 0 HcmV?d00001 diff --git a/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm b/migrations/.wrangler/state/v3/d1/miniflare-D1DatabaseObject/c056329e4df97ff69a503ff209faf6e3c10ecb9c167bf5f298f50c3fca22f7bf.sqlite-shm new file mode 100644 index 0000000000000000000000000000000000000000..52700bd09d4c73843906863553342fae94c0e136 GIT binary patch literal 32768 zcmeI*O)kSx7{%e%H}z4pw5a}lju=UdnV5)?kuVY~unMshW)@&EmSMs-<=Ssc4J3o6kxIgLLoW5Tk9Y4Na-CjIDbzeHaJg=`B zWJ8Z1Kl1%{&F?$@{SkFk9aG2E33XDPQm54!wW0c~pJ&xMbzWUio9d!E{Cr8Tm(`Zq zR#((jwWAKtboF{oU03~Idg`XyS1XkufB*srAb3iPm7}L&|B+{09MV@*pV!-nT+x2@Ln?#F{F{G~P5sVhMO_6G;*9jx7>P zpo%q4#%a7=j>Hn!iZxEgX}njF#1hz!HBQE9yit_I64;3~PR41xOP9nF*o`$##%a7o zn#2;=i#1NhX}mw3#1innev%^4&|LxT!EXPQ)0{ydl7R2XAPAHx;M-vc0%Z#LUL=A* znF78Mj37{^fbYN~2$U({+aC!6WeWJdQ-VO50>0^&AW)`&@3NvbF25ts9bH>#{#GY)O`8Ef$~yGPpyr zG(UiL_uk!mKSZ7&2S`ve--GG9d*AnYo_jy<`@DDe`t>~rJ|2BIbjSC1g+hZNx~1>S zzB{+?uHSqyBkvhHayDZmao+UL^)$aW{^b9hpF5G)bd@aC8_SAL_8HRQaOf~0M%l3O zOHvto>ofMf?8Mlc`aI{I4#S~4A6ytny&X!#PlfvLPCb`;D*1ZyQZk-+CLzXOH4?Fb z00@8p2!H?xfIv49xRi+TyLWS!59!KcRh3Ii@{A~Dr?OJkdz_psidjh{Qg$LQ5^oO4 zB#6D8Q`bnYAc;rCA~{~n&18!w$+UQK)G2UYskT%yk)NHglY~k<#ve*^mm^xOtX|N{ z7i6VWs<&!-Q!cAjRaeWhqO+ugeG$3Bl(;|w3X|CaVFDRfhfwJYv{b6s^hUi}RU1q& z9&E`i?6c%13k$I*Kc41R+pAPn7S$@V(HCsPEay{irU_0pxt7rqG&Z)bBslUjj^IWJ1kK+-5cfir@6Iu6H2+< zP@7Fwok*|+yQI&6ohDq7xEP;Kb5C3}yExOv_F-GA)wNlw-L17Uk;Db5y*t~ajowM5 z+>AIUWoM3)!mLCJ^Z7iP5|3o(^AeeyFBZjuBs)bcUudnAX*0b)VYg4amMyil*7Bm- za9ogGjg{F{S6gbWq_$-{7rVzQ$WWT~W;)1xA$MY4v`VbB7OPrGrta=9GRLJDmJ|d_mbJQH(#rGoyteDK}+1l7TR0#NH#ww603rKkz9df_Kc3R3L2Sa-4(sw*t9xVqFozp?=#n=&6Q}^ zX7_J8)(2mmRGjzJ)hpsCObBB=QGPPbg-o|?u2zj#bXjTXb@SXE#`iLh>KzMjN?@D` z^cc5U`aVxL`kwF17fk)v?X~9NH@-7K6RoKPdV~!GKmY_l00ck)1V8`;KmY_l00cmw zI|*X{vQDL z{|j!v_2^q)IgxuB^8(%6pidi z%a8sM^8yYZ9Do1_fB*=900@8p2!H?xfB*=9KsOVJ(#o-Uf!`i^GMxwUZ9(s9CZT$5C8!X009sH0T2KI5C8!X z00D~OZj$rU z14bg3dM{-B!3F{#00JNY0w4eaAOHd&00JNY0wBJoq8_yRPy!YrDQzuOhSym8b1}AjNjIGHTrV2ZWO=<0w4eaAn?Hx&D8MK2waNf2Yx%0vbWl4Ob^LdF(&KHYfL6V&!mT+sO{QiaAW^*lDYHb|m1+z`A0009sH0T2KI5C8!X009sH0rM*YY+j&n z<!f1V_DJ3 zJ|i?iI2<}mh*7lH`0X-&Y@_Q9Xd2s0ee5J)-y6E~!G(d;+aW&uVyOS_)N`q)lCLK( zCF6-_5@P(-_^H@r{Ine;U{*-BWBammKG=#UGB7KhVUdCF-4W&YrMZ?9 zd6<=ut$4BQbZ4-7JFu(~bea{;SV_Xe%R^?^;YaT6OIx~gJ8s?-M&$HSq50%$BuT4ZQGQrem?TNhZ>HnV8p|-B($@GZONY|bmuh&#kG5(-om2s5M_Swd&CAVC1U*U-Q49vX2+0AOY)2;Wv8-I)_a`v zbq#Nh-8YD_(SYW;f+QXli{yAQHVYBnI6A5}sh){{IzZbIT z)gE7ENr|@p&vHD4eIe{=ByU|U8;vJa`T{MLoS5iLUp&~7Ti9pGO%@hnQGPtltvbU8 z(<+#R2z;97G@<?AhpPj(L~7mmT$v zg*PQI&IEdl7pp$|009sH0T2KI5C8!X009sH zfi5Pnoz{)b3w-l;9{qar7v_5~FVMv;j;etG2!H?xfB*=900@8p2!H?xfPhP22R&r- 
z0!Ke}@YHu#ZhZsu0xlpNfdB}A00@8p2!H?xfB*=900@9U7Zcb?>&E5gT#(4|Vs0i|JV~a-lcQ!vsi7*mT9yq2Qf@|^ld?0% zNnutZh53A*Oo>Ob^LdF(&KHYfL6V&!mT+sO{QiZ@YE@lt*-~3kYUQeGT`;>8E2rMj z&SHX_mCh?kTy#l(=AYj*f-N zY=JO=jH^R9y)(-1PjhSShU`XQLVU0l)?8i_cA9YSjwrt`&9&O~TOEU`-x;jl?m=Gd zR+8}W@=%OVr@2S(Fx`jgV*9N=7hcgoH4*J<3m}gE~9|Ebn7SJI1zcN?gBx138)%(18+v8SMwYC` z_gG48Xvy^?k4VOLrbmQij2{`{t_+*b{C5gVr`^mQTv%UQ>F9@gL#Z`a>J5F}oiHKnABgg~kswF( zG>g2~bFyWB|7OL;Jz@iw5;1=FZtn6Svt!7mC3!}avQt?p>pf2Tx`sE$?i=*c<(#@k zas^2|Di+%xUuXg24&748M1FR{e4-&lsKnRb3zbr--l`e*R`))}k`itGpXGQ8Q^#h> zKCkWzv}8Oo=#6@{sy3KlJlK+3*k{R278YVremu>sI>QIkDp!?7waU)=f^C@PeCo{< zVKr)uhsKkGJ03D+@?y1KIwu5i!*I(AGWnx&*Cig zdK~gs0v;kcmxQKG4?w$xflZOe9E9rjp58AItC%X|alN^7yIm1OGf{z7dJ<8Ddz zK+am-FKJo!^}M$0v6yT9DnT714bH`dN*YJ z!3F{#00JNY0w4eaAOHd&00JNY0wB;t_QEW@yLno!TE8T^}~U_Zd_x(tuy(2N#c^?`rQq zBOXDMeG`~=Z}y#d_Az_JBe;f$Anxb2-EbmSuiwBSI()3ahpYeDEskcKl#3S&87+@c$Mmz$N zAOHd&00JNY0w4eaAOHd&00JP;Z3J$i6=LfJ{@~|s`1ZlizxsLbw)llk%)TdIJ zEq}e`SpUEG|6%|6elhujf_zhF15LHr)ao=QyJg_!AOmhej{!GP_&}B)-hM-r z|6mrdty?^4Ro?o0*caU0=AKE=e}H6b`;HfLGuh%vGA*8T)?+BTT9y^vT^ZFL5PWjJ zSQHDAEahgzIVn4H+!AiBl;6Lw+s3YCOKtG#k>C|q8?56hvC6X;@xDb|AMV1&3LoPS zrMb%yXR#|=*XLPs!IE~hb!nF4DePMt=rR>f?~L;M)7)CSAsZFb#RpqqP(SqLhhy6} zik{A{T;9iyc8qP?l)zpluzhUnrUb^Bz_y2%hhls>%{_XDPUFe5bhh8>^F5Y&J+9qi zjV_4`k~0znJEqT>g8lQFS48TAF+MxOT^@2euxCx2r~i9WhuV6wC(|QBUB;7>@pw%) zE= 0), + chain_id TEXT NOT NULL, + channel TEXT NOT NULL, + asset_type TEXT NOT NULL, + coingecko_id TEXT, + UNIQUE(chain_id, symbol) +); + +CREATE INDEX idx_assets_symbol ON assets(symbol); +CREATE INDEX idx_assets_chain_id ON assets(chain_id); +CREATE INDEX idx_assets_deleted_at ON assets(deleted_at); + + diff --git a/migrations/006_prices_table.down.sql b/migrations/006_prices_table.down.sql new file mode 100644 index 0000000..3bc3a94 --- /dev/null +++ b/migrations/006_prices_table.down.sql @@ -0,0 +1 @@ +DROP TABLE prices; diff --git a/migrations/006_prices_table.up.sql b/migrations/006_prices_table.up.sql new file mode 100644 index 0000000..9333756 --- /dev/null +++ b/migrations/006_prices_table.up.sql @@ -0,0 +1,28 @@ +-- Prices entity based on the Alternative.me API for crypto prices +CREATE TABLE prices ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + asset_id TEXT NOT NULL, + price_usd REAL, + price_btc REAL, + volume_24h_usd REAL, + market_cap_usd REAL, + available_supply REAL, + total_supply REAL, + max_supply REAL, + percent_change_1h REAL, + percent_change_24h REAL, + percent_change_7d REAL, + rank INTEGER, + last_updated TIMESTAMP NOT NULL, + FOREIGN KEY (asset_id) REFERENCES assets(id) +); + +CREATE INDEX idx_prices_asset_id ON prices(asset_id); +CREATE INDEX idx_prices_rank ON prices(rank); +CREATE INDEX idx_prices_last_updated ON prices(last_updated); +CREATE INDEX idx_prices_deleted_at ON prices(deleted_at); + + diff --git a/migrations/007_price_conversions_table.down.sql b/migrations/007_price_conversions_table.down.sql new file mode 100644 index 0000000..926f7d5 --- /dev/null +++ b/migrations/007_price_conversions_table.down.sql @@ -0,0 +1 @@ +DROP TABLE price_conversions; diff --git a/migrations/007_price_conversions_table.up.sql b/migrations/007_price_conversions_table.up.sql new file mode 100644 index 0000000..3310f1b --- /dev/null +++ b/migrations/007_price_conversions_table.up.sql @@ -0,0 +1,21 @@ +-- Currency conversion rates for crypto prices +CREATE TABLE price_conversions ( + id TEXT PRIMARY KEY, + created_at 
TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + price_id TEXT NOT NULL, + currency_code TEXT NOT NULL, + price REAL, + volume_24h REAL, + market_cap REAL, + last_updated TIMESTAMP NOT NULL, + FOREIGN KEY (price_id) REFERENCES prices(id), + UNIQUE(price_id, currency_code) +); + +CREATE INDEX idx_price_conversions_price_id ON price_conversions(price_id); +CREATE INDEX idx_price_conversions_currency_code ON price_conversions(currency_code); +CREATE INDEX idx_price_conversions_deleted_at ON price_conversions(deleted_at); + + diff --git a/migrations/008_blockchains_table.down.sql b/migrations/008_blockchains_table.down.sql new file mode 100644 index 0000000..922ed95 --- /dev/null +++ b/migrations/008_blockchains_table.down.sql @@ -0,0 +1 @@ +DROP TABLE blockchains; diff --git a/migrations/008_blockchains_table.up.sql b/migrations/008_blockchains_table.up.sql new file mode 100644 index 0000000..0e58a5c --- /dev/null +++ b/migrations/008_blockchains_table.up.sql @@ -0,0 +1,71 @@ +-- Blockchains table to store chain configuration parameters +CREATE TABLE blockchains ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + + -- Basic chain information + chain_name TEXT NOT NULL, + chain_id_cosmos TEXT, + chain_id_evm TEXT, + api_name TEXT, + bech_account_prefix TEXT, + bech_validator_prefix TEXT, + + -- Chain assets + main_asset_symbol TEXT, + main_asset_denom TEXT, + staking_asset_symbol TEXT, + staking_asset_denom TEXT, + is_stake_enabled BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_stake_enabled IN (0,1)), + + -- Chain images + chain_image TEXT, + main_asset_image TEXT, + staking_asset_image TEXT, + + -- Chain types and features + chain_type TEXT NOT NULL CHECK(json_valid(chain_type)), + is_support_mobile_wallet BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_mobile_wallet IN (0,1)), + is_support_extension_wallet BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_extension_wallet IN (0,1)), + is_support_erc20 BOOLEAN NOT NULL DEFAULT FALSE CHECK(is_support_erc20 IN (0,1)), + + -- Descriptions in multiple languages + description_en TEXT, + description_ko TEXT, + description_ja TEXT, + + -- Genesis information + origin_genesis_time TIMESTAMP, + + -- Account types configuration + account_type TEXT NOT NULL CHECK(json_valid(account_type)), + + -- BTC staking specific + btc_staking TEXT CHECK(json_valid(btc_staking)), + + -- Cosmos fee information + cosmos_fee_info TEXT CHECK(json_valid(cosmos_fee_info)), + + -- EVM fee information + evm_fee_info TEXT CHECK(json_valid(evm_fee_info)), + + -- Endpoints + lcd_endpoint TEXT CHECK(json_valid(lcd_endpoint)), + grpc_endpoint TEXT CHECK(json_valid(grpc_endpoint)), + evm_rpc_endpoint TEXT CHECK(json_valid(evm_rpc_endpoint)), + + -- Explorer information + explorer TEXT CHECK(json_valid(explorer)), + + -- Social and documentation links + about TEXT CHECK(json_valid(about)), + forum TEXT CHECK(json_valid(forum)) +); + +CREATE INDEX idx_blockchains_chain_name ON blockchains(chain_name); +CREATE INDEX idx_blockchains_chain_id_cosmos ON blockchains(chain_id_cosmos); +CREATE INDEX idx_blockchains_chain_id_evm ON blockchains(chain_id_evm); +CREATE INDEX idx_blockchains_main_asset_symbol ON blockchains(main_asset_symbol); +CREATE INDEX idx_blockchains_deleted_at ON blockchains(deleted_at); diff --git a/migrations/009_services_table.down.sql b/migrations/009_services_table.down.sql new 
file mode 100644 index 0000000..4dd8073 --- /dev/null +++ b/migrations/009_services_table.down.sql @@ -0,0 +1 @@ +DROP TABLE services; diff --git a/migrations/009_services_table.up.sql b/migrations/009_services_table.up.sql new file mode 100644 index 0000000..459f653 --- /dev/null +++ b/migrations/009_services_table.up.sql @@ -0,0 +1,24 @@ +-- Service for Service Records sourced on chain +CREATE TABLE services ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + name TEXT NOT NULL, + description TEXT, + chain_id TEXT NOT NULL, + address TEXT NOT NULL, + owner_address TEXT NOT NULL, + metadata TEXT CHECK(json_valid(metadata)), + status TEXT NOT NULL, + block_height INTEGER NOT NULL, + FOREIGN KEY (chain_id) REFERENCES assets(chain_id), + UNIQUE(chain_id, address) +); + +CREATE INDEX idx_services_name ON services(name); +CREATE INDEX idx_services_chain_id ON services(chain_id); +CREATE INDEX idx_services_address ON services(address); +CREATE INDEX idx_services_owner_address ON services(owner_address); +CREATE INDEX idx_services_status ON services(status); +CREATE INDEX idx_services_deleted_at ON services(deleted_at); diff --git a/migrations/010_activities_table.down.sql b/migrations/010_activities_table.down.sql new file mode 100644 index 0000000..c0537ec --- /dev/null +++ b/migrations/010_activities_table.down.sql @@ -0,0 +1 @@ +DROP TABLE activities; diff --git a/migrations/010_activities_table.up.sql b/migrations/010_activities_table.up.sql new file mode 100644 index 0000000..50d5757 --- /dev/null +++ b/migrations/010_activities_table.up.sql @@ -0,0 +1,32 @@ + +-- Activity table for basic transaction broadcast activity +CREATE TABLE activities ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + account_id TEXT NOT NULL, + tx_hash TEXT, + tx_type TEXT NOT NULL, + status TEXT NOT NULL, + amount TEXT, + fee TEXT, + gas_used INTEGER, + gas_wanted INTEGER, + memo TEXT, + block_height INTEGER, + timestamp TIMESTAMP NOT NULL, + raw_log TEXT, + error TEXT, + FOREIGN KEY (account_id) REFERENCES accounts(id) +); + +CREATE INDEX idx_activities_account_id ON activities(account_id); +CREATE INDEX idx_activities_tx_hash ON activities(tx_hash); +CREATE INDEX idx_activities_tx_type ON activities(tx_type); +CREATE INDEX idx_activities_status ON activities(status); +CREATE INDEX idx_activities_timestamp ON activities(timestamp); +CREATE INDEX idx_activities_block_height ON activities(block_height); +CREATE INDEX idx_activities_deleted_at ON activities(deleted_at); + + diff --git a/migrations/011_health_table.down.sql b/migrations/011_health_table.down.sql new file mode 100644 index 0000000..7f234e7 --- /dev/null +++ b/migrations/011_health_table.down.sql @@ -0,0 +1 @@ +DROP TABLE health; diff --git a/migrations/011_health_table.up.sql b/migrations/011_health_table.up.sql new file mode 100644 index 0000000..47b30ce --- /dev/null +++ b/migrations/011_health_table.up.sql @@ -0,0 +1,28 @@ +-- Health table for scheduled checks for API endpoints +CREATE TABLE health ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + endpoint_url TEXT NOT NULL, + endpoint_type TEXT NOT NULL, + chain_id TEXT, + status TEXT NOT NULL, + response_time_ms INTEGER, + last_checked TIMESTAMP 
NOT NULL, + next_check TIMESTAMP, + failure_count INTEGER NOT NULL DEFAULT 0, + success_count INTEGER NOT NULL DEFAULT 0, + response_data TEXT, + error_message TEXT, + FOREIGN KEY (chain_id) REFERENCES assets(chain_id) +); + +CREATE INDEX idx_health_endpoint_url ON health(endpoint_url); +CREATE INDEX idx_health_endpoint_type ON health(endpoint_type); +CREATE INDEX idx_health_chain_id ON health(chain_id); +CREATE INDEX idx_health_status ON health(status); +CREATE INDEX idx_health_last_checked ON health(last_checked); +CREATE INDEX idx_health_next_check ON health(next_check); +CREATE INDEX idx_health_deleted_at ON health(deleted_at); + diff --git a/migrations/012_global_market_table.down.sql b/migrations/012_global_market_table.down.sql new file mode 100644 index 0000000..04d1d4b --- /dev/null +++ b/migrations/012_global_market_table.down.sql @@ -0,0 +1 @@ +DROP TABLE global_market; diff --git a/migrations/012_global_market_table.up.sql b/migrations/012_global_market_table.up.sql new file mode 100644 index 0000000..8216c4b --- /dev/null +++ b/migrations/012_global_market_table.up.sql @@ -0,0 +1,19 @@ +-- Global market data from Alternative.me API +CREATE TABLE global_market ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + total_market_cap_usd REAL, + total_24h_volume_usd REAL, + bitcoin_percentage_of_market_cap REAL, + active_currencies INTEGER, + active_assets INTEGER, + active_markets INTEGER, + last_updated TIMESTAMP NOT NULL +); + +CREATE INDEX idx_global_market_last_updated ON global_market(last_updated); +CREATE INDEX idx_global_market_deleted_at ON global_market(deleted_at); + + diff --git a/migrations/013_fear_greed_index_table.down.sql b/migrations/013_fear_greed_index_table.down.sql new file mode 100644 index 0000000..0eedee7 --- /dev/null +++ b/migrations/013_fear_greed_index_table.down.sql @@ -0,0 +1 @@ +DROP TABLE fear_greed_index; diff --git a/migrations/013_fear_greed_index_table.up.sql b/migrations/013_fear_greed_index_table.up.sql new file mode 100644 index 0000000..8db8eef --- /dev/null +++ b/migrations/013_fear_greed_index_table.up.sql @@ -0,0 +1,15 @@ +-- Fear and Greed Index data from Alternative.me +CREATE TABLE fear_greed_index ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + value INTEGER NOT NULL, + value_classification TEXT NOT NULL, + timestamp TIMESTAMP NOT NULL, + time_until_update TEXT +); + +CREATE INDEX idx_fear_greed_index_timestamp ON fear_greed_index(timestamp); +CREATE INDEX idx_fear_greed_index_value ON fear_greed_index(value); +CREATE INDEX idx_fear_greed_index_deleted_at ON fear_greed_index(deleted_at); diff --git a/migrations/014_crypto_listings_table.down.sql b/migrations/014_crypto_listings_table.down.sql new file mode 100644 index 0000000..5dc3abc --- /dev/null +++ b/migrations/014_crypto_listings_table.down.sql @@ -0,0 +1 @@ +DROP TABLE crypto_listings; diff --git a/migrations/014_crypto_listings_table.up.sql b/migrations/014_crypto_listings_table.up.sql new file mode 100644 index 0000000..f190d07 --- /dev/null +++ b/migrations/014_crypto_listings_table.up.sql @@ -0,0 +1,18 @@ +-- Listings data from Alternative.me API +CREATE TABLE crypto_listings ( + id TEXT PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at TIMESTAMP, + 
api_id TEXT NOT NULL, + name TEXT NOT NULL, + symbol TEXT NOT NULL, + website_slug TEXT NOT NULL, + UNIQUE(api_id) +); + + +CREATE INDEX idx_crypto_listings_api_id ON crypto_listings(api_id); +CREATE INDEX idx_crypto_listings_symbol ON crypto_listings(symbol); +CREATE INDEX idx_crypto_listings_website_slug ON crypto_listings(website_slug); +CREATE INDEX idx_crypto_listings_deleted_at ON crypto_listings(deleted_at); diff --git a/migrations/Taskfile.yml b/migrations/Taskfile.yml new file mode 100644 index 0000000..979f707 --- /dev/null +++ b/migrations/Taskfile.yml @@ -0,0 +1,200 @@ +# yaml-language-server: $schema=https://taskfile.dev/schema.json +version: "3" +silent: true + +tasks: + default: + cmds: + - task: migrate + + initialize: + cmds: + - task: migrate:accounts:up + - task: migrate:credentials:up + - task: migrate:profiles:up + - task: migrate:vaults:up + - task: migrate:assets:up + - task: migrate:prices:up + - task: migrate:price_conversions:up + - task: migrate:blockchains:up + - task: migrate:services:up + - task: migrate:activities:up + - task: migrate:health:up + - task: migrate:global_market:up + - task: migrate:fear_greed_index:up + - task: migrate:crypto_listings:up + + migrate: + cmds: + - task: migrate:accounts + - task: migrate:credentials + - task: migrate:profiles + - task: migrate:vaults + - task: migrate:assets + - task: migrate:prices + - task: migrate:price_conversions + - task: migrate:blockchains + - task: migrate:services + - task: migrate:activities + - task: migrate:health + - task: migrate:global_market + - task: migrate:fear_greed_index + - task: migrate:crypto_listings + + # --------------- + # Main Tasks + # --------------- + migrate:accounts: + cmds: + - task: migrate:accounts:down + - task: migrate:accounts:up + + migrate:accounts:up: + cmd: wrangler d1 execute USERS_DB --file 001_accounts_table.up.sql --remote -y + + migrate:accounts:down: + cmd: wrangler d1 execute USERS_DB --file 001_accounts_table.down.sql --remote -y + + migrate:credentials: + cmds: + - task: migrate:credentials:down + - task: migrate:credentials:up + + migrate:credentials:up: + cmd: wrangler d1 execute USERS_DB --file 002_credentials_table.up.sql --remote -y + + migrate:credentials:down: + cmd: wrangler d1 execute USERS_DB --file 002_credentials_table.down.sql --remote -y + + migrate:profiles: + cmds: + - task: migrate:profiles:down + - task: migrate:profiles:up + + migrate:profiles:up: + cmd: wrangler d1 execute USERS_DB --file 003_profiles_table.up.sql --remote -y + + migrate:profiles:down: + cmd: wrangler d1 execute USERS_DB --file 003_profiles_table.down.sql --remote -y + + migrate:vaults: + cmds: + - task: migrate:vaults:down + - task: migrate:vaults:up + + migrate:vaults:down: + cmd: wrangler d1 execute USERS_DB --file 004_vaults_table.down.sql --remote -y + + migrate:vaults:up: + cmd: wrangler d1 execute USERS_DB --file 004_vaults_table.up.sql --remote -y + + migrate:assets: + cmds: + - task: migrate:assets:down + - task: migrate:assets:up + + migrate:assets:up: + cmd: wrangler d1 execute NETWORK_DB --file 005_assets_table.up.sql --remote -y + + migrate:assets:down: + cmd: wrangler d1 execute NETWORK_DB --file 005_assets_table.down.sql --remote -y + + migrate:prices: + cmds: + - task: migrate:prices:down + - task: migrate:prices:up + + migrate:prices:up: + cmd: wrangler d1 execute NETWORK_DB --file 006_prices_table.up.sql --remote -y + + migrate:prices:down: + cmd: wrangler d1 execute NETWORK_DB --file 006_prices_table.down.sql --remote -y + + migrate:price_conversions: + 
cmds: + - task: migrate:price_conversions:down + - task: migrate:price_conversions:up + + migrate:price_conversions:up: + cmd: wrangler d1 execute NETWORK_DB --file 007_price_conversions_table.up.sql --remote -y + + migrate:price_conversions:down: + cmd: wrangler d1 execute NETWORK_DB --file 007_price_conversions_table.down.sql --remote -y + + migrate:blockchains: + cmds: + - task: migrate:blockchains:down + - task: migrate:blockchains:up + + migrate:blockchains:up: + cmd: wrangler d1 execute NETWORK_DB --file 008_blockchains_table.up.sql --remote -y + + migrate:blockchains:down: + cmd: wrangler d1 execute NETWORK_DB --file 008_blockchains_table.down.sql --remote -y + + migrate:services: + cmds: + - task: migrate:services:down + - task: migrate:services:up + + migrate:services:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 009_services_table.up.sql --remote -y + + migrate:services:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 009_services_table.down.sql --remote -y + + migrate:activities: + cmds: + - task: migrate:activities:down + - task: migrate:activities:up + + migrate:activities:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 010_activities_table.up.sql --remote -y + + migrate:activities:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 010_activities_table.down.sql --remote -y + + migrate:health: + cmds: + - task: migrate:health:down + - task: migrate:health:up + + migrate:health:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 011_health_table.up.sql --remote -y + + migrate:health:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 011_health_table.down.sql --remote -y + + migrate:global_market: + cmds: + - task: migrate:global_market:down + - task: migrate:global_market:up + + migrate:global_market:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 012_global_market_table.up.sql --remote -y + + migrate:global_market:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 012_global_market_table.down.sql --remote -y + + migrate:fear_greed_index: + cmds: + - task: migrate:fear_greed_index:down + - task: migrate:fear_greed_index:up + + migrate:fear_greed_index:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 013_fear_greed_index_table.up.sql --remote -y + + migrate:fear_greed_index:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 013_fear_greed_index_table.down.sql --remote -y + + migrate:crypto_listings: + cmds: + - task: migrate:crypto_listings:down + - task: migrate:crypto_listings:up + + migrate:crypto_listings:up: + cmd: wrangler d1 execute ACTIVITY_DB --file 014_crypto_listings_table.up.sql --remote -y + + migrate:crypto_listings:down: + cmd: wrangler d1 execute ACTIVITY_DB --file 014_crypto_listings_table.down.sql --remote -y + diff --git a/migrations/node_modules/.cache/wrangler/wrangler-account.json b/migrations/node_modules/.cache/wrangler/wrangler-account.json new file mode 100644 index 0000000..25faf23 --- /dev/null +++ b/migrations/node_modules/.cache/wrangler/wrangler-account.json @@ -0,0 +1,6 @@ +{ + "account": { + "id": "eb37925850388bca807b7fab964c12bb", + "name": "Sonr" + } +} \ No newline at end of file diff --git a/migrations/node_modules/.mf/cf.json b/migrations/node_modules/.mf/cf.json new file mode 100644 index 0000000..d437f69 --- /dev/null +++ b/migrations/node_modules/.mf/cf.json @@ -0,0 +1 @@ +{"clientTcpRtt":8,"requestHeaderNames":{},"httpProtocol":"HTTP/1.1","tlsCipher":"AEAD-AES256-GCM-SHA384","continent":"NA","asn":701,"clientAcceptEncoding":"br, gzip, 
deflate","verifiedBotCategory":"","country":"US","region":"Virginia","tlsClientCiphersSha1":"kXrN3VEKDdzz2cPKTQaKzpxVTxQ=","tlsClientAuth":{"certIssuerDNLegacy":"","certIssuerSKI":"","certSubjectDNRFC2253":"","certSubjectDNLegacy":"","certFingerprintSHA256":"","certNotBefore":"","certSKI":"","certSerial":"","certIssuerDN":"","certVerified":"NONE","certNotAfter":"","certSubjectDN":"","certPresented":"0","certRevoked":"0","certIssuerSerial":"","certIssuerDNRFC2253":"","certFingerprintSHA1":""},"tlsClientRandom":"KHkBe8nH4XNP9wnNS5nCDWBpe+Ha+8+BUuP0iev0P7Q=","tlsExportedAuthenticator":{"clientFinished":"c71857a631b6612f8bdfda376b597ddb0ccf62688fc7f50086006daba82f54c412501557ccfce73754bc550a1e09a6b9","clientHandshake":"8d0a2b64f7b6d0d1c2a77d7535feca90c9703a46c457b4951670146a8b5e2fe89357c6d8666c4e7f864e6814e7bb1d0f","serverHandshake":"429ef59250f50d719b076c2efdf97ecd5d1a50c15fdf979df5894d078793865ff44c7680213365147c44daedbc92bec6","serverFinished":"6e46d6694b01edbbc7d5daa9316565f17fb3a626713c96286d07487a7ddb7482aea03a84971fc74231d848d2f037af41"},"tlsClientHelloLength":"383","colo":"IAD","timezone":"America/New_York","longitude":"-77.53900","latitude":"39.01800","edgeRequestKeepAliveStatus":1,"requestPriority":"","postalCode":"20147","city":"Ashburn","tlsVersion":"TLSv1.3","regionCode":"VA","asOrganization":"Verizon Fios","metroCode":"511","tlsClientExtensionsSha1Le":"u4wtEMFQBY18l3BzHAvORm+KGRw=","tlsClientExtensionsSha1":"1eY97BUYYO8vDaTfHQywB1pcNdM=","botManagement":{"corporateProxy":false,"verifiedBot":false,"jsDetection":{"passed":false},"staticResource":false,"detectionIds":{},"score":99}} \ No newline at end of file diff --git a/migrations/wrangler.toml b/migrations/wrangler.toml new file mode 100644 index 0000000..b898e6a --- /dev/null +++ b/migrations/wrangler.toml @@ -0,0 +1,64 @@ +# Top-level configuration +name = "motr-worker" +main = "worker.mjs" +compatibility_date = "2025-04-14" + +routes = [ + { pattern = "sonr.id", custom_domain = true }, +] + +[build] +command = "devbox run build:worker" + +[dev] +port = 6969 + +[observability] +enabled = true + +[triggers] +crons = ["0 */1 * * *"] + +[[d1_databases]] +binding = "ACTIVITY_DB" +database_name = "motr-activity" +database_id = "a7ccb4bb-c529-4f42-8029-92564a3aecb8" + +[[d1_databases]] +binding = "NETWORK_DB" +database_name = "motr-network" +database_id = "acb75499-3502-4052-9604-263a913e077a" + +[[d1_databases]] +binding = "USERS_DB" +database_name = "motr-users" +database_id = "8ed4d399-5932-419c-b92f-9c20d7a36ad2" + +[[kv_namespaces]] +binding = "SESSIONS_KV" +id = "ea5de66fcfc14b5eba170395e29432ee" + +[[kv_namespaces]] +binding = "HANDLES_KV" +id = "271d47087a8842b2aac5ee79cf7bb203" + +[[r2_buckets]] +binding = 'PROFILES' +bucket_name = 'profiles' + +[vars] +SONR_CHAIN_ID = 'sonr-testnet-1' +IPFS_GATEWAY = 'https://ipfs.sonr.land' +SONR_API_URL = 'https://api.sonr.land' +SONR_RPC_URL = 'https://rpc.sonr.land' +SONR_GRPC_URL = 'https://grpc.sonr.land' +MATRIX_SERVER = 'https://bm.chat' +MOTR_GATEWAY = 'https://sonr.id' +MOTR_VAULT = 'https://did.run' + +[durable_objects] +bindings = [{name = "VAULT", class_name = "Vault"}] + +[[migrations]] +tag = "v1" # Should be unique for each entry +new_classes = ["Vault"] # List the classes that should be created diff --git a/package.json b/package.json new file mode 100644 index 0000000..16fb0a8 --- /dev/null +++ b/package.json @@ -0,0 +1,15 @@ +{ + "name": "@sonr-io/radar-worker", + "version": "0.0.1", + "private": true, + "dependencies": { + "@extism/extism": "^2.0.0-rc11", + 
"@helia/dag-cbor": "^1.0.1", + "@helia/dag-json": "^1.0.1", + "@helia/json": "^1.0.1", + "@helia/strings": "^1.0.1", + "@helia/unixfs": "^1.4.1", + "helia": "^2.1.0", + "sonr-cosmes": "^0.0.5" + } +} diff --git a/wrangler.toml b/wrangler.toml new file mode 100644 index 0000000..0124f25 --- /dev/null +++ b/wrangler.toml @@ -0,0 +1,59 @@ +# Top-level configuration +name = "motr-radar" +main = "build/worker.mjs" +compatibility_date = "2025-04-14" + +routes = [ + { pattern = "did.run", custom_domain = true }, +] + +[build] +command = "make build" + +[dev] +port = 4242 + +[observability] +enabled = true + +[triggers] +crons = ["0 */1 * * *"] + + +[[d1_databases]] +binding = "ACTIVITY_DB" +database_name = "motr-activity" +database_id = "a7ccb4bb-c529-4f42-8029-92564a3aecb8" + +[[d1_databases]] +binding = "NETWORK_DB" +database_name = "motr-network" +database_id = "acb75499-3502-4052-9604-263a913e077a" + +[[d1_databases]] +binding = "USERS_DB" +database_name = "motr-users" +database_id = "8ed4d399-5932-419c-b92f-9c20d7a36ad2" + +[[kv_namespaces]] +binding = "SESSIONS_KV" +id = "ea5de66fcfc14b5eba170395e29432ee" + +[[kv_namespaces]] +binding = "HANDLES_KV" +id = "271d47087a8842b2aac5ee79cf7bb203" + +[[r2_buckets]] +binding = 'PROFILES' +bucket_name = 'profiles' + +[vars] +SONR_CHAIN_ID = 'sonr-testnet-1' +IPFS_GATEWAY = 'https://ipfs.sonr.land' +SONR_API_URL = 'https://api.sonr.land' +SONR_RPC_URL = 'https://rpc.sonr.land' +SONR_GRPC_URL = 'https://grpc.sonr.land' +MATRIX_SERVER = 'https://bm.chat' +MOTR_GATEWAY = 'https://sonr.id' +MOTR_VAULT = 'https://did.run' +MOTR_MODE = 'controller'