From 2ae3e1e3b0838ea4bea7b4dd9180eb3cb26210bf Mon Sep 17 00:00:00 2001 From: nick Date: Fri, 9 Aug 2024 15:45:11 +0900 Subject: [PATCH] wip --- .github/workflows/api.test.yaml | 18 +- .github/workflows/node.test.yaml | 2 + dockerfiles/orakl-api.Dockerfile | 10 +- node/cmd/api/.version | 1 + node/cmd/api/main.go | 73 +++++ .../api/000001_initialize_tables.down.sql | 25 ++ .../api/000001_initialize_tables.up.sql | 124 ++++++++ node/migrations/api/000002_blocks.down.sql | 2 + node/migrations/api/000002_blocks.up.sql | 10 + node/pkg/api/apierr/controller.go | 83 +++++ node/pkg/api/apierr/queries.go | 11 + node/pkg/api/apierr/route.go | 14 + node/pkg/api/blocks/controller.go | 117 +++++++ node/pkg/api/blocks/queries.go | 51 +++ node/pkg/api/blocks/route.go | 15 + node/pkg/api/chain/controller.go | 83 +++++ node/pkg/api/chain/queries.go | 15 + node/pkg/api/chain/route.go | 15 + node/pkg/api/l2aggregator/controller.go | 34 ++ node/pkg/api/l2aggregator/queries.go | 7 + node/pkg/api/l2aggregator/route.go | 11 + node/pkg/api/listener/controller.go | 153 +++++++++ node/pkg/api/listener/queries.go | 66 ++++ node/pkg/api/listener/route.go | 15 + node/pkg/api/proxy/controller.go | 101 ++++++ node/pkg/api/proxy/queries.go | 20 ++ node/pkg/api/proxy/route.go | 15 + node/pkg/api/reporter/controller.go | 248 +++++++++++++++ node/pkg/api/reporter/queries.go | 65 ++++ node/pkg/api/reporter/route.go | 16 + node/pkg/api/secrets/secrets.go | 77 +++++ node/pkg/api/service/controller.go | 78 +++++ node/pkg/api/service/queries.go | 27 ++ node/pkg/api/service/route.go | 15 + node/pkg/api/tests/apierr_test.go | 76 +++++ node/pkg/api/tests/chain_test.go | 68 ++++ node/pkg/api/tests/listener_test.go | 97 ++++++ node/pkg/api/tests/proxy_test.go | 79 +++++ node/pkg/api/tests/reporter_test.go | 99 ++++++ node/pkg/api/tests/service_test.go | 68 ++++ node/pkg/api/tests/vrf_test.go | 94 ++++++ node/pkg/api/utils/custom_types.go | 234 ++++++++++++++ node/pkg/api/utils/test_helper.go | 142 +++++++++ 
node/pkg/api/utils/utils.go | 297 ++++++++++++++++++ node/pkg/api/vrf/controller.go | 142 +++++++++ node/pkg/api/vrf/queries.go | 43 +++ node/pkg/api/vrf/route.go | 15 + 47 files changed, 3055 insertions(+), 16 deletions(-) create mode 100644 node/cmd/api/.version create mode 100644 node/cmd/api/main.go create mode 100644 node/migrations/api/000001_initialize_tables.down.sql create mode 100644 node/migrations/api/000001_initialize_tables.up.sql create mode 100644 node/migrations/api/000002_blocks.down.sql create mode 100644 node/migrations/api/000002_blocks.up.sql create mode 100644 node/pkg/api/apierr/controller.go create mode 100644 node/pkg/api/apierr/queries.go create mode 100644 node/pkg/api/apierr/route.go create mode 100644 node/pkg/api/blocks/controller.go create mode 100644 node/pkg/api/blocks/queries.go create mode 100644 node/pkg/api/blocks/route.go create mode 100644 node/pkg/api/chain/controller.go create mode 100644 node/pkg/api/chain/queries.go create mode 100644 node/pkg/api/chain/route.go create mode 100644 node/pkg/api/l2aggregator/controller.go create mode 100644 node/pkg/api/l2aggregator/queries.go create mode 100644 node/pkg/api/l2aggregator/route.go create mode 100644 node/pkg/api/listener/controller.go create mode 100644 node/pkg/api/listener/queries.go create mode 100644 node/pkg/api/listener/route.go create mode 100644 node/pkg/api/proxy/controller.go create mode 100644 node/pkg/api/proxy/queries.go create mode 100644 node/pkg/api/proxy/route.go create mode 100644 node/pkg/api/reporter/controller.go create mode 100644 node/pkg/api/reporter/queries.go create mode 100644 node/pkg/api/reporter/route.go create mode 100644 node/pkg/api/secrets/secrets.go create mode 100644 node/pkg/api/service/controller.go create mode 100644 node/pkg/api/service/queries.go create mode 100644 node/pkg/api/service/route.go create mode 100644 node/pkg/api/tests/apierr_test.go create mode 100644 node/pkg/api/tests/chain_test.go create mode 100644 
node/pkg/api/tests/listener_test.go create mode 100644 node/pkg/api/tests/proxy_test.go create mode 100644 node/pkg/api/tests/reporter_test.go create mode 100644 node/pkg/api/tests/service_test.go create mode 100644 node/pkg/api/tests/vrf_test.go create mode 100644 node/pkg/api/utils/custom_types.go create mode 100644 node/pkg/api/utils/test_helper.go create mode 100644 node/pkg/api/utils/utils.go create mode 100644 node/pkg/api/vrf/controller.go create mode 100644 node/pkg/api/vrf/queries.go create mode 100644 node/pkg/api/vrf/route.go diff --git a/.github/workflows/api.test.yaml b/.github/workflows/api.test.yaml index 964b52a32..900e5fb3f 100644 --- a/.github/workflows/api.test.yaml +++ b/.github/workflows/api.test.yaml @@ -5,7 +5,7 @@ on: branches-ignore: - "master" paths: - - "api/**" + - "node/pkg/api/**" workflow_dispatch: jobs: @@ -45,27 +45,23 @@ jobs: go-version: "1.22.3" check-latest: true cache-dependency-path: | - ./api/go.sum + ./node/go.sum - name: Install golang-migrate run: | curl -L https://github.com/golang-migrate/migrate/releases/download/v4.17.0/migrate.linux-amd64.tar.gz | tar xvz sudo mv ./migrate /usr/bin - name: Migrate up run: | - cd ./api - migrate -database "postgresql://postgres:postgres@localhost:5432/orakl-test?search_path=public&sslmode=disable" -verbose -path ./migrations up + cd ./node + migrate -database "postgresql://postgres:postgres@localhost:5432/orakl-test?search_path=public&sslmode=disable" -verbose -path ./migrations/api up - name: Install dependencies run: | - cd ./api + cd ./node go mod tidy - - name: Build - run: | - cd ./api - go build - name: Run test run: | - cd ./api - go test ./tests -v + cd ./node + go test ./pkg/api/tests -v env: DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/orakl-test?search_path=public" ENCRYPT_PASSWORD: "abc123" diff --git a/.github/workflows/node.test.yaml b/.github/workflows/node.test.yaml index 3dd3ac14a..514070a1d 100644 --- a/.github/workflows/node.test.yaml +++
b/.github/workflows/node.test.yaml @@ -13,6 +13,8 @@ on: - "!node/pkg/logscribe/**" - "!node/migrations/logscribe/**" - "!node/pkg/logscribeconsumer/**" + - "!node/pkg/api/**" + - "!node/migrations/api/**" workflow_dispatch: jobs: diff --git a/dockerfiles/orakl-api.Dockerfile b/dockerfiles/orakl-api.Dockerfile index dbcd4b5b7..5cf951ed1 100644 --- a/dockerfiles/orakl-api.Dockerfile +++ b/dockerfiles/orakl-api.Dockerfile @@ -5,11 +5,11 @@ RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/* WORKDIR /app -COPY api api +COPY node node -WORKDIR /app/api +WORKDIR /app/node -RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o apibin -ldflags="-w -s" . +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o apibin -ldflags="-w -s" ./cmd/api/main.go # debian:bullseye-slim FROM debian@sha256:4b48997afc712259da850373fdbc60315316ee72213a4e77fc5a66032d790b2a @@ -23,9 +23,9 @@ WORKDIR /app RUN mkdir /app/migrations -COPY --from=builder /app/api/migrations /app/migrations +COPY --from=builder /app/node/migrations/api /app/migrations -COPY --from=builder /app/api/apibin /usr/bin +COPY --from=builder /app/node/apibin /usr/bin COPY dockerfiles/start-go.sh . 
diff --git a/node/cmd/api/.version b/node/cmd/api/.version new file mode 100644 index 000000000..8a9ecc2ea --- /dev/null +++ b/node/cmd/api/.version @@ -0,0 +1 @@ +0.0.1 \ No newline at end of file diff --git a/node/cmd/api/main.go b/node/cmd/api/main.go new file mode 100644 index 000000000..cee66bd37 --- /dev/null +++ b/node/cmd/api/main.go @@ -0,0 +1,73 @@ +package main + +import ( + _ "embed" + "log" + + "bisonai.com/orakl/node/pkg/api/apierr" + "bisonai.com/orakl/node/pkg/api/blocks" + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/listener" + "bisonai.com/orakl/node/pkg/api/proxy" + "bisonai.com/orakl/node/pkg/api/reporter" + "bisonai.com/orakl/node/pkg/api/service" + "bisonai.com/orakl/node/pkg/api/utils" + "bisonai.com/orakl/node/pkg/api/vrf" + + "github.com/gofiber/fiber/v2" + "github.com/joho/godotenv" +) + +//go:embed .version +var version string + +func main() { + err := godotenv.Load() + if err != nil { + log.Println("env file is not found, continuing without .env file") + } + config, err := utils.LoadEnvVars() + if err != nil { + panic(err) + } + + appConfig, err := utils.Setup(version) + if err != nil { + panic(err) + } + + postgres := appConfig.Postgres + app := appConfig.App + + defer postgres.Close() + + v1 := app.Group("/api/v1") + SetRouter(v1) + + var port string + if val, ok := config["APP_PORT"].(string); ok { + port = val + } else { + port = "3000" + } + + err = app.Listen(":" + port) + if err != nil { + panic(err) + } +} + +func SetRouter(_router fiber.Router) { + (_router).Get("", func(c *fiber.Ctx) error { + return c.SendString("Orakl Network API") + }) + + apierr.Routes(_router) + chain.Routes(_router) + listener.Routes(_router) + proxy.Routes(_router) + reporter.Routes(_router) + service.Routes(_router) + vrf.Routes(_router) + blocks.Routes(_router) +} diff --git a/node/migrations/api/000001_initialize_tables.down.sql b/node/migrations/api/000001_initialize_tables.down.sql new file mode 100644 index 
000000000..ba1dfa056 --- /dev/null +++ b/node/migrations/api/000001_initialize_tables.down.sql @@ -0,0 +1,25 @@ +-- Drop foreign key constraints first +ALTER TABLE IF EXISTS "feeds" DROP CONSTRAINT IF EXISTS "feeds_adapter_id_fkey"; +ALTER TABLE IF EXISTS "aggregates" DROP CONSTRAINT IF EXISTS "aggregates_aggregator_id_fkey"; +ALTER TABLE IF EXISTS "aggregators" DROP CONSTRAINT IF EXISTS "aggregators_adapter_id_fkey"; +ALTER TABLE IF EXISTS "aggregators" DROP CONSTRAINT IF EXISTS "aggregators_chain_id_fkey"; +ALTER TABLE IF EXISTS "data" DROP CONSTRAINT IF EXISTS "data_aggregator_id_fkey"; +ALTER TABLE IF EXISTS "data" DROP CONSTRAINT IF EXISTS "data_feed_id_fkey"; +ALTER TABLE IF EXISTS "listeners" DROP CONSTRAINT IF EXISTS "listeners_chain_id_fkey"; +ALTER TABLE IF EXISTS "listeners" DROP CONSTRAINT IF EXISTS "listeners_service_id_fkey"; +ALTER TABLE IF EXISTS "reporters" DROP CONSTRAINT IF EXISTS "reporters_chain_id_fkey"; +ALTER TABLE IF EXISTS "reporters" DROP CONSTRAINT IF EXISTS "reporters_service_id_fkey"; + +-- Drop tables in reverse order of creation +DROP TABLE IF EXISTS "vrf_keys"; +DROP TABLE IF EXISTS "services"; +DROP TABLE IF EXISTS "reporters"; +DROP TABLE IF EXISTS "proxies"; +DROP TABLE IF EXISTS "listeners"; +DROP TABLE IF EXISTS "feeds"; +DROP TABLE IF EXISTS "error"; +DROP TABLE IF EXISTS "data"; +DROP TABLE IF EXISTS "chains"; +DROP TABLE IF EXISTS "aggregators"; +DROP TABLE IF EXISTS "aggregates"; +DROP TABLE IF EXISTS "adapters"; diff --git a/node/migrations/api/000001_initialize_tables.up.sql b/node/migrations/api/000001_initialize_tables.up.sql new file mode 100644 index 000000000..8325c1362 --- /dev/null +++ b/node/migrations/api/000001_initialize_tables.up.sql @@ -0,0 +1,124 @@ +CREATE TABLE IF NOT EXISTS "adapters" ( + adapter_hash TEXT NOT NULL, + adapter_id BIGSERIAL NOT NULL, + decimals INTEGER NOT NULL, + name TEXT NOT NULL, + CONSTRAINT "adapters_adapter_hash_key" UNIQUE ("adapter_hash"), + CONSTRAINT "adapters_pkey" PRIMARY KEY 
("adapter_id") +); + +CREATE TABLE IF NOT EXISTS "chains" ( + chain_id BIGSERIAL NOT NULL, + name TEXT NOT NULL, + CONSTRAINT "chains_name_key" UNIQUE ("name"), + CONSTRAINT "chains_pkey" PRIMARY KEY ("chain_id") +); + +CREATE TABLE IF NOT EXISTS "services" ( + name TEXT NOT NULL, + service_id BIGSERIAL NOT NULL, + CONSTRAINT "services_name_key" UNIQUE ("name"), + CONSTRAINT "services_pkey" PRIMARY KEY ("service_id") +); + +CREATE TABLE IF NOT EXISTS "aggregators" ( + absolute_threshold DOUBLE PRECISION NOT NULL, + active BOOLEAN NOT NULL DEFAULT false, + adapter_id BIGINT NOT NULL, + address TEXT NOT NULL, + aggregator_hash TEXT NOT NULL, + aggregator_id BIGSERIAL NOT NULL, + chain_id BIGINT NOT NULL, + fetcher_type INTEGER NOT NULL, + heartbeat INTEGER NOT NULL, + name TEXT NOT NULL, + threshold DOUBLE PRECISION NOT NULL, + CONSTRAINT "aggregators_address_key" UNIQUE ("address"), + CONSTRAINT "aggregators_adapter_id_fkey" FOREIGN KEY ("adapter_id") REFERENCES "public"."adapters" ("adapter_id"), + CONSTRAINT "aggregators_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "aggregators_pkey" PRIMARY KEY ("aggregator_id") +); + +CREATE TABLE IF NOT EXISTS "aggregates" ( + aggregate_id BIGSERIAL NOT NULL, + aggregator_id BIGINT NOT NULL, + timestamp TIMESTAMP WITH TIME ZONE NOT NULL, + value BIGINT NOT NULL, + CONSTRAINT "aggregates_aggregator_id_fkey" FOREIGN KEY ("aggregator_id") REFERENCES "public"."aggregators" ("aggregator_id") ON DELETE CASCADE, + CONSTRAINT "aggregates_pkey" PRIMARY KEY ("aggregate_id") +); + +CREATE TABLE IF NOT EXISTS "feeds" ( + adapter_id BIGINT NOT NULL, + definition JSONB NOT NULL, + feed_id BIGSERIAL NOT NULL, + name TEXT NOT NULL, + CONSTRAINT "feeds_adapter_id_fkey" FOREIGN KEY ("adapter_id") REFERENCES "public"."adapters" ("adapter_id") ON DELETE CASCADE, + CONSTRAINT "feeds_pkey" PRIMARY KEY ("feed_id") +); + +CREATE TABLE IF NOT EXISTS "data" ( + aggregator_id BIGINT NOT NULL, + data_id 
BIGSERIAL NOT NULL, + feed_id BIGINT NOT NULL, + timestamp TIMESTAMP WITH TIME ZONE NOT NULL, + value BIGINT NOT NULL, + CONSTRAINT "data_aggregator_id_fkey" FOREIGN KEY ("aggregator_id") REFERENCES "public"."aggregators" ("aggregator_id") ON DELETE CASCADE, + CONSTRAINT "data_pkey" PRIMARY KEY ("data_id"), + CONSTRAINT "data_feed_id_fkey" FOREIGN KEY ("feed_id") REFERENCES "public"."feeds" ("feed_id") +); + +CREATE TABLE IF NOT EXISTS "error" ( + code TEXT NOT NULL, + error_id BIGSERIAL NOT NULL, + name TEXT NOT NULL, + request_id TEXT NOT NULL, + stack TEXT NOT NULL, + timestamp TIMESTAMP WITH TIME ZONE NOT NULL, + CONSTRAINT "error_pkey" PRIMARY KEY ("error_id") +); + +CREATE TABLE IF NOT EXISTS "listeners" ( + address CHARACTER VARYING(42) NOT NULL, + chain_id BIGINT NOT NULL, + event_name CHARACTER VARYING(255) NOT NULL, + listener_id BIGSERIAL NOT NULL, + service_id BIGINT NOT NULL, + CONSTRAINT "listeners_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "listeners_service_id_fkey" FOREIGN KEY ("service_id") REFERENCES "public"."services" ("service_id"), + CONSTRAINT "listeners_pkey" PRIMARY KEY ("listener_id") +); + +CREATE TABLE IF NOT EXISTS "proxies" ( + host TEXT NOT NULL, + id BIGSERIAL NOT NULL, + location TEXT, + port INTEGER NOT NULL, + protocol TEXT NOT NULL, + CONSTRAINT "proxies_protocol_host_port_key" UNIQUE ("protocol", "host", "port"), + CONSTRAINT "proxies_pkey" PRIMARY KEY ("id") +); + +CREATE TABLE IF NOT EXISTS "reporters" ( + address CHARACTER VARYING(42) NOT NULL, + chain_id BIGINT NOT NULL, + "oracleAddress" CHARACTER VARYING(42) NOT NULL, + "privateKey" CHARACTER VARYING(164) NOT NULL, + reporter_id BIGSERIAL NOT NULL, + service_id BIGINT NOT NULL, + CONSTRAINT "reporters_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "reporters_service_id_fkey" FOREIGN KEY ("service_id") REFERENCES "public"."services" ("service_id"), + CONSTRAINT 
"reporters_pkey" PRIMARY KEY ("reporter_id") +); + +CREATE TABLE IF NOT EXISTS "vrf_keys" ( + chain_id BIGINT NOT NULL, + key_hash CHARACTER VARYING(66) NOT NULL, + pk CHARACTER VARYING(130) NOT NULL, + pk_x CHARACTER VARYING(78) NOT NULL, + pk_y CHARACTER VARYING(78) NOT NULL, + sk CHARACTER VARYING(64) NOT NULL, + vrf_key_id BIGSERIAL NOT NULL, + CONSTRAINT "vrf_keys_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "vrf_keys_pkey" PRIMARY KEY ("vrf_key_id") +); \ No newline at end of file diff --git a/node/migrations/api/000002_blocks.down.sql b/node/migrations/api/000002_blocks.down.sql new file mode 100644 index 000000000..613b57bfe --- /dev/null +++ b/node/migrations/api/000002_blocks.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS "observed_blocks"; +DROP TABLE IF EXISTS "unprocessed_blocks"; diff --git a/node/migrations/api/000002_blocks.up.sql b/node/migrations/api/000002_blocks.up.sql new file mode 100644 index 000000000..53047e4da --- /dev/null +++ b/node/migrations/api/000002_blocks.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS "observed_blocks" ( + service TEXT NOT NULL UNIQUE, + block_number BIGINT NOT NULL +); + +CREATE TABLE IF NOT EXISTS "unprocessed_blocks" ( + service TEXT NOT NULL, + block_number BIGINT NOT NULL, + UNIQUE (service, block_number) +); \ No newline at end of file diff --git a/node/pkg/api/apierr/controller.go b/node/pkg/api/apierr/controller.go new file mode 100644 index 000000000..2b26b8278 --- /dev/null +++ b/node/pkg/api/apierr/controller.go @@ -0,0 +1,83 @@ +package apierr + +import ( + "fmt" + + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ErrorInsertModel struct { + RequestId string `db:"request_id" json:"requestId" validate:"required"` + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Code string `db:"code" json:"code" validate:"required"` + Name string 
`db:"name" json:"name" validate:"required"` + Stack string `db:"stack" json:"stack" validate:"required"` +} + +type ErrorModel struct { + ERROR_ID *utils.CustomInt64 `db:"error_id" json:"id"` + RequestId string `db:"request_id" json:"requestId" validate:"required"` + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Code string `db:"code" json:"code" validate:"required"` + Name string `db:"name" json:"name" validate:"required"` + Stack string `db:"stack" json:"stack" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ErrorInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ErrorModel](c, InsertError, map[string]any{ + "request_id": payload.RequestId, + "timestamp": payload.Timestamp.String(), + "code": payload.Code, + "name": payload.Name, + "stack": payload.Stack}) + + if err != nil { + return err + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[ErrorModel](c, GetError, nil) + if err != nil { + return err + } + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ErrorModel](c, GetErrorById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + if !utils.IsTesting(c) { + panic(fmt.Errorf("not allowed")) + } + id := c.Params("id") + result, err := utils.QueryRow[ErrorModel](c, RemoveErrorById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} diff --git a/node/pkg/api/apierr/queries.go b/node/pkg/api/apierr/queries.go new file mode 100644 index 000000000..02da65e07 --- /dev/null +++ b/node/pkg/api/apierr/queries.go @@ -0,0 +1,11 @@ +package apierr + +const ( + InsertError = `INSERT INTO error (request_id, 
timestamp, code, name, stack) VALUES (@request_id, @timestamp::timestamptz, @code, @name, @stack) RETURNING *` + + GetError = `SELECT * FROM error;` + + GetErrorById = `SELECT * FROM error WHERE error_id = @id` + + RemoveErrorById = `DELETE FROM error WHERE error_id = @id RETURNING *;` +) diff --git a/node/pkg/api/apierr/route.go b/node/pkg/api/apierr/route.go new file mode 100644 index 000000000..94f216f94 --- /dev/null +++ b/node/pkg/api/apierr/route.go @@ -0,0 +1,14 @@ +package apierr + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + apierr := router.Group("/error") + + apierr.Post("", insert) + apierr.Get("", get) + apierr.Get("/:id", getById) + apierr.Delete("/:id", deleteById) +} diff --git a/node/pkg/api/blocks/controller.go b/node/pkg/api/blocks/controller.go new file mode 100644 index 000000000..3e45657c6 --- /dev/null +++ b/node/pkg/api/blocks/controller.go @@ -0,0 +1,117 @@ +package blocks + +import ( + "bisonai.com/orakl/node/pkg/api/utils" + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type BlockModel struct { + Service string `db:"service" json:"service" validate:"required"` + BlockNumber int64 `db:"block_number" json:"blockNumber" validate:"isZeroOrPositive"` +} + +type BlocksModel struct { + Service string `db:"service" json:"service" validate:"required"` + Blocks []int64 `db:"blocks" json:"blocks" validate:"dive,isZeroOrPositive"` +} + +var validate *validator.Validate + +func init() { + validate = validator.New() + _ = validate.RegisterValidation("isZeroOrPositive", func(fl validator.FieldLevel) bool { + return fl.Field().Int() >= 0 + }) +} + +func validateBlockPayload(payload interface{}) error { + return validate.Struct(payload) +} + +func getObservedBlock(c *fiber.Ctx) error { + service := c.Query("service") + if service == "" { + return fiber.NewError(fiber.StatusBadRequest, "service is required") + } + result, err := utils.QueryRow[BlockModel](c, GetObservedBlock, 
map[string]any{ + "service": service, + }) + if err != nil { + return err + } + if result.Service == "" { + return c.JSON(nil) + } + + return c.JSON(result) +} + +func upsertObservedBlock(c *fiber.Ctx) error { + payload := new(BlockModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + if err := validateBlockPayload(payload); err != nil { + return err + } + + result, err := utils.QueryRow[BlockModel](c, UpsertObservedBlock, map[string]any{ + "service": payload.Service, + "block_number": payload.BlockNumber, + }) + if err != nil { + return err + } + + return c.JSON(result) +} + +func getUnprocessedBlocks(c *fiber.Ctx) error { + service := c.Query("service") + if service == "" { + return fiber.NewError(fiber.StatusBadRequest, "service is required") + } + result, err := utils.QueryRows[BlockModel](c, GetUnprocessedBlocks, map[string]any{ + "service": service, + }) + if err != nil { + return err + } + + return c.JSON(result) +} + +func insertUnprocessedBlocks(c *fiber.Ctx) error { + payload := new(BlocksModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + if err := validateBlockPayload(payload); err != nil { + return err + } + + result, err := utils.QueryRows[BlocksModel](c, GenerateInsertBlocksQuery(payload.Blocks, payload.Service), map[string]any{}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func deleteUnprocessedBlock(c *fiber.Ctx) error { + service := c.Params("service") + blockNumber := c.Params("blockNumber") + + result, err := utils.QueryRow[BlockModel](c, DeleteUnprocessedBlock, map[string]any{ + "service": service, + "block_number": blockNumber, + }) + if err != nil { + return err + } + + return c.JSON(result) +} diff --git a/node/pkg/api/blocks/queries.go b/node/pkg/api/blocks/queries.go new file mode 100644 index 000000000..46e6715fd --- /dev/null +++ b/node/pkg/api/blocks/queries.go @@ -0,0 +1,51 @@ +package blocks + +import ( + "fmt" + "strings" +) + +const ( + // get observedBlock given 
service + GetObservedBlock = ` + SELECT * FROM observed_blocks + WHERE service = @service; + ` + + // upsert to observed_blocks given service and block_number + UpsertObservedBlock = ` + INSERT INTO observed_blocks (service, block_number) + VALUES (@service, @block_number) + ON CONFLICT (service) DO UPDATE SET block_number = GREATEST(observed_blocks.block_number, EXCLUDED.block_number) + RETURNING *; + ` + + // get all unprocessed blocks given service + GetUnprocessedBlocks = ` + SELECT * FROM unprocessed_blocks + WHERE service = @service; + ` + + // delete unprocessed block given service and block_number + DeleteUnprocessedBlock = ` + DELETE FROM unprocessed_blocks + WHERE service = @service AND block_number = @block_number + RETURNING *; + ` +) + +func GenerateInsertBlocksQuery(blocks []int64, service string) string { + baseQuery := ` + INSERT INTO unprocessed_blocks (service, block_number) + VALUES + ` + onConflict := ` + ON CONFLICT (service, block_number) DO NOTHING; + ` + values := make([]string, 0, len(blocks)) + for _, block := range blocks { + values = append(values, fmt.Sprintf("('%s', %d)", service, block)) + } + + return baseQuery + strings.Join(values, ",") + onConflict +} \ No newline at end of file diff --git a/node/pkg/api/blocks/route.go b/node/pkg/api/blocks/route.go new file mode 100644 index 000000000..954504ba1 --- /dev/null +++ b/node/pkg/api/blocks/route.go @@ -0,0 +1,15 @@ +package blocks + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + blocks := router.Group("/blocks") + + blocks.Get("/observed", getObservedBlock) + blocks.Post("/observed", upsertObservedBlock) + blocks.Post("/unprocessed", insertUnprocessedBlocks) + blocks.Get("/unprocessed", getUnprocessedBlocks) + blocks.Delete("/unprocessed/:service/:blockNumber", deleteUnprocessedBlock) +} \ No newline at end of file diff --git a/node/pkg/api/chain/controller.go b/node/pkg/api/chain/controller.go new file mode 100644 index 000000000..d7fe6fb74 --- 
/dev/null +++ b/node/pkg/api/chain/controller.go @@ -0,0 +1,83 @@ +package chain + +import ( + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ChainInsertModel struct { + Name string `db:"name" json:"name" validate:"required"` +} + +type ChainModel struct { + ChainId *utils.CustomInt64 `db:"chain_id" json:"id"` + Name string `db:"name" json:"name" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ChainInsertModel) + + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ChainModel](c, InsertChain, map[string]any{"name": payload.Name}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[ChainModel](c, GetChain, nil) + if err != nil { + return err + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ChainModel](c, GetChainByID, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func patchById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ChainInsertModel) + + if err := c.BodyParser(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ChainModel](c, UpdateChain, map[string]any{"name": payload.Name, "id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + + result, err := utils.QueryRow[ChainModel](c, RemoveChain, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} diff --git a/node/pkg/api/chain/queries.go b/node/pkg/api/chain/queries.go new file mode 100644 index 000000000..ac5678732 --- /dev/null +++ b/node/pkg/api/chain/queries.go @@ -0,0 +1,15 @@ +package chain + +const 
( + GetChain = `SELECT * FROM chains;` + + GetChainByID = `SELECT * FROM chains WHERE chain_id = @id;` + + GetChainByName = `SELECT * FROM chains WHERE name = @name;` + + InsertChain = `INSERT INTO chains (name) VALUES (@name) RETURNING *;` + + UpdateChain = `UPDATE chains SET name = @name WHERE chain_id = @id RETURNING *;` + + RemoveChain = `DELETE FROM chains WHERE chain_id = @id RETURNING *;` +) diff --git a/node/pkg/api/chain/route.go b/node/pkg/api/chain/route.go new file mode 100644 index 000000000..c4f2fc5cd --- /dev/null +++ b/node/pkg/api/chain/route.go @@ -0,0 +1,15 @@ +package chain + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + chain := router.Group("/chain") + + chain.Get("", get) + chain.Get("/:id", getById) + chain.Post("", insert) + chain.Patch("/:id", patchById) + chain.Delete("/:id", deleteById) +} diff --git a/node/pkg/api/l2aggregator/controller.go b/node/pkg/api/l2aggregator/controller.go new file mode 100644 index 000000000..691b2cc92 --- /dev/null +++ b/node/pkg/api/l2aggregator/controller.go @@ -0,0 +1,34 @@ +package l2aggregator + +import ( + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/gofiber/fiber/v2" +) + +type l2agregatorPairModel struct { + Id *utils.CustomInt64 `db:"id" json:"id"` + L1AggregatorAddress string `db:"l1_aggregator_address" json:"l1AggregatorAddress"` + L2AggregatorAddress string `db:"l2_aggregator_address" json:"l2AggregatorAddress"` + Active *utils.CustomBool `db:"active" json:"active"` + ChainId *utils.CustomInt64 `db:"chain_id" json:"chainId"` +} + +func get(c *fiber.Ctx) error { + _chain := c.Params("chain") + l1Address := c.Params("l1Address") + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": _chain}) + if err != nil { + return err + } + + result, err := utils.QueryRow[l2agregatorPairModel](c, GetL2AggregatorPair, map[string]any{"l1_aggregator_address": l1Address, 
"chain_id": chain_result.ChainId}) + if err != nil { + return err + } + + return c.JSON(result) + +} diff --git a/node/pkg/api/l2aggregator/queries.go b/node/pkg/api/l2aggregator/queries.go new file mode 100644 index 000000000..8a714f997 --- /dev/null +++ b/node/pkg/api/l2aggregator/queries.go @@ -0,0 +1,7 @@ +package l2aggregator + +const ( + GetL2AggregatorPair = ` + SELECT * FROM l2aggregatorpair WHERE (l1_aggregator_address = @l1_aggregator_address AND chain_id = @chain_id) LIMIT 1; + ` +) diff --git a/node/pkg/api/l2aggregator/route.go b/node/pkg/api/l2aggregator/route.go new file mode 100644 index 000000000..624cc39e0 --- /dev/null +++ b/node/pkg/api/l2aggregator/route.go @@ -0,0 +1,11 @@ +package l2aggregator + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + l2aggregator := router.Group("/l2aggregator") + + l2aggregator.Get("/:chain/:l1Address", get) +} diff --git a/node/pkg/api/listener/controller.go b/node/pkg/api/listener/controller.go new file mode 100644 index 000000000..8dad261c3 --- /dev/null +++ b/node/pkg/api/listener/controller.go @@ -0,0 +1,153 @@ +package listener + +import ( + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/service" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ListenerUpdateModel struct { + Address string `db:"address" json:"address" validate:"required"` + EventName string `db:"event_name" json:"eventName" validate:"required"` +} + +type ListenerSearchModel struct { + Chain string `db:"name" json:"chain"` + Service string `db:"name" json:"service"` +} + +type ListenerModel struct { + ListenerId *utils.CustomInt64 `db:"listener_id" json:"id"` + Address string `db:"address" json:"address" validate:"required"` + EventName string `db:"event_name" json:"eventName" validate:"required"` + Service string `db:"service_name" json:"service" validate:"required"` + Chain string `db:"chain_name" 
json:"chain" validate:"required"` +} + +type ListenerInsertModel struct { + Address string `db:"address" json:"address" validate:"required"` + EventName string `db:"event_name" json:"eventName" validate:"required"` + Service string `db:"service_name" json:"service" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ListenerInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + return err + } + + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + return err + } + + result, err := utils.QueryRow[ListenerModel](c, InsertListener, map[string]any{ + "address": payload.Address, + "event_name": payload.EventName, + "chain_id": chain_result.ChainId, + "service_id": service_result.ServiceId}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + payload := new(ListenerSearchModel) + params := GetListenerQueryParams{} + + if len(c.Body()) == 0 { + results, err := utils.QueryRows[ListenerModel](c, GenerateGetListenerQuery(params), nil) + if err != nil { + return err + } + + return c.JSON(results) + } + + if err := c.BodyParser(payload); err != nil { + return err + } + + if payload.Chain != "" { + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + return err + } + params.ChainId = chain_result.ChainId.String() + } + + if payload.Service != "" { + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + 
return err + } + params.ServiceId = service_result.ServiceId.String() + } + + results, err := utils.QueryRows[ListenerModel](c, GenerateGetListenerQuery(params), nil) + if err != nil { + return err + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ListenerModel](c, GetListenerById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ListenerUpdateModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ListenerModel](c, UpdateListenerById, map[string]any{ + "id": id, + "address": payload.Address, + "event_name": payload.EventName}) + if err != nil { + return err + } + + return c.JSON(result) + +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ListenerModel](c, DeleteListenerById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} diff --git a/node/pkg/api/listener/queries.go b/node/pkg/api/listener/queries.go new file mode 100644 index 000000000..6bd3f01c7 --- /dev/null +++ b/node/pkg/api/listener/queries.go @@ -0,0 +1,66 @@ +package listener + +import ( + "strings" +) + +type GetListenerQueryParams struct { + ChainId string + ServiceId string +} + +const ( + InsertListener = ` + INSERT INTO listeners (address, event_name, chain_id, service_id) + VALUES (@address, @event_name, @chain_id, @service_id) + RETURNING listeners.listener_id, listeners.address, listeners.event_name, + (SELECT name from chains WHERE chain_id = listeners.chain_id) AS chain_name, + (SELECT name from services WHERE service_id = listeners.service_id) AS service_name; + ` + + GetListenerById = ` + SELECT listeners.listener_id, listeners.address, listeners.event_name, 
chains.name AS chain_name, services.name AS service_name + FROM listeners + JOIN chains ON listeners.chain_id = chains.chain_id + JOIN services ON listeners.service_id = services.service_id + WHERE listener_id = @id + LIMIT 1; + ` + + UpdateListenerById = ` + UPDATE listeners + SET address = @address, event_name = @event_name + WHERE listener_id = @id + RETURNING listeners.listener_id, listeners.address, listeners.event_name, + (SELECT name from chains WHERE chain_id = listeners.chain_id) AS chain_name, + (SELECT name from services WHERE service_id = listeners.service_id) AS service_name; + ` + + DeleteListenerById = ` + DELETE FROM listeners WHERE listener_id = @id + RETURNING listeners.listener_id, listeners.address, listeners.event_name, + (SELECT name from chains WHERE chain_id = listeners.chain_id) AS chain_name, + (SELECT name from services WHERE service_id = listeners.service_id) AS service_name; + ` +) + +func GenerateGetListenerQuery(params GetListenerQueryParams) string { + baseQuery := ` + SELECT listeners.listener_id, listeners.address, listeners.event_name, chains.name AS chain_name, services.name AS service_name + FROM listeners + JOIN chains ON listeners.chain_id = chains.chain_id + JOIN services ON listeners.service_id = services.service_id + ` + var conditionQueries []string + if params.ChainId != "" { + conditionQueries = append(conditionQueries, "listeners.chain_id = "+params.ChainId) + } + if params.ServiceId != "" { + conditionQueries = append(conditionQueries, "listeners.service_id = "+params.ServiceId) + } + if len(conditionQueries) == 0 { + return baseQuery + } + joinedString := strings.Join(conditionQueries, " AND ") + return baseQuery + " WHERE " + joinedString +} diff --git a/node/pkg/api/listener/route.go b/node/pkg/api/listener/route.go new file mode 100644 index 000000000..9318ab98a --- /dev/null +++ b/node/pkg/api/listener/route.go @@ -0,0 +1,15 @@ +package listener + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router 
fiber.Router) { + listener := router.Group("/listener") + + listener.Post("", insert) + listener.Get("", get) + listener.Get("/:id", getById) + listener.Patch("/:id", updateById) + listener.Delete("/:id", deleteById) +} diff --git a/node/pkg/api/proxy/controller.go b/node/pkg/api/proxy/controller.go new file mode 100644 index 000000000..82093791d --- /dev/null +++ b/node/pkg/api/proxy/controller.go @@ -0,0 +1,101 @@ +package proxy + +import ( + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ProxyModel struct { + Id *utils.CustomInt64 `db:"id" json:"id"` + Protocol string `db:"protocol" json:"protocol" validate:"required"` + Host string `db:"host" json:"host" validate:"required"` + Port *utils.CustomInt32 `db:"port" json:"port" validate:"required"` + Location *string `db:"location" json:"location"` +} + +type ProxyInsertModel struct { + Protocol string `db:"protocol" json:"protocol" validate:"required"` + Host string `db:"host" json:"host" validate:"required"` + Port *utils.CustomInt32 `db:"port" json:"port" validate:"required"` + Location *string `db:"location" json:"location"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ProxyInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ProxyModel](c, InsertProxy, map[string]any{ + "protocol": payload.Protocol, + "host": payload.Host, + "port": payload.Port, + "location": &payload.Location}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[ProxyModel](c, GetProxy, nil) + if err != nil { + return err + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ProxyModel](c, GetProxyById, map[string]any{"id": id}) + if err != nil { 
+ return err + } + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + + payload := new(ProxyInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ProxyModel](c, UpdateProxyById, map[string]any{ + "id": id, + "protocol": payload.Protocol, + "host": payload.Host, + "port": payload.Port, + "location": &payload.Location}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ProxyModel](c, DeleteProxyById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} diff --git a/node/pkg/api/proxy/queries.go b/node/pkg/api/proxy/queries.go new file mode 100644 index 000000000..475df6a33 --- /dev/null +++ b/node/pkg/api/proxy/queries.go @@ -0,0 +1,20 @@ +package proxy + +const ( + InsertProxy = `INSERT INTO proxies (protocol, host, port, location) VALUES (@protocol, @host, @port, @location) RETURNING *;` + + GetProxy = `SELECT * FROM proxies ORDER BY id asc;` + + GetProxyById = `SELECT * FROM proxies WHERE id = @id;` + + UpdateProxyById = ` + UPDATE proxies + SET protocol = @protocol, host = @host, port = @port, location = @location + WHERE id = @id + RETURNING *; + ` + + DeleteProxyById = ` + DELETE FROM proxies WHERE id = @id RETURNING *; + ` +) diff --git a/node/pkg/api/proxy/route.go b/node/pkg/api/proxy/route.go new file mode 100644 index 000000000..991a3971c --- /dev/null +++ b/node/pkg/api/proxy/route.go @@ -0,0 +1,15 @@ +package proxy + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + proxy := router.Group("/proxy") + + proxy.Post("", insert) + proxy.Get("", get) + proxy.Get("/:id", getById) + proxy.Patch("/:id", updateById) + proxy.Delete("/:id", deleteById) +} diff --git 
a/node/pkg/api/reporter/controller.go b/node/pkg/api/reporter/controller.go new file mode 100644 index 000000000..f1a110e5d --- /dev/null +++ b/node/pkg/api/reporter/controller.go @@ -0,0 +1,248 @@ +package reporter + +import ( + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/service" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ReporterUpdateModel struct { + Address string `db:"address" json:"address" validate:"required"` + PrivateKey string `db:"privateKey" json:"privateKey" validate:"required"` + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` +} + +type ReporterModel struct { + ReporterId *utils.CustomInt64 `db:"reporter_id" json:"id"` + Address string `db:"address" json:"address" validate:"required"` + PrivateKey string `db:"privateKey" json:"privateKey" validate:"required"` + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` + Service string `db:"service_name" json:"service" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +type ReporterInsertModel struct { + Address string `db:"address" json:"address" validate:"required"` + PrivateKey string `db:"privateKey" json:"privateKey" validate:"required"` + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` + Service string `db:"service_name" json:"service" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +type ReporterSearchModel struct { + Chain string `db:"chain_name" json:"chain"` + Service string `db:"service_name" json:"service"` +} + +type ReporterSearchByOracleAddressModel struct { + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` + Chain string `db:"chain_name" json:"chain"` + Service string `db:"service_name" json:"service"` +} + +func insert(c *fiber.Ctx) error { + payload := 
new(ReporterInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + return err + } + + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + return err + } + + encrypted, err := utils.EncryptText(payload.PrivateKey) + if err != nil { + return err + } + + result, err := utils.QueryRow[ReporterModel](c, InsertReporter, map[string]any{ + "address": payload.Address, + "privateKey": encrypted, + "oracleAddress": payload.OracleAddress, + "chain_id": chain_result.ChainId, + "service_id": service_result.ServiceId}) + if err != nil { + return err + } + + result.PrivateKey = payload.PrivateKey + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + payload := new(ReporterSearchModel) + params := GetReporterQueryParams{} + + if len(c.Body()) == 0 { + results, err := utils.QueryRows[ReporterModel](c, GenerateGetReporterQuery(params), nil) + if err != nil { + return err + } + for i := range results { + decrypted, err := utils.DecryptText(results[i].PrivateKey) + if err != nil { + return err + } + results[i].PrivateKey = decrypted + } + + return c.JSON(results) + } + + if err := c.BodyParser(payload); err != nil { + return err + } + + if payload.Chain != "" { + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + return err + } + params.ChainId = chain_result.ChainId.String() + } + + if payload.Service != "" { + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + return err + } + params.ServiceId = service_result.ServiceId.String() + } + 
+ results, err := utils.QueryRows[ReporterModel](c, GenerateGetReporterQuery(params), nil) + if err != nil { + return err + } + + for i := range results { + decrypted, err := utils.DecryptText(results[i].PrivateKey) + if err != nil { + return err + } + results[i].PrivateKey = decrypted + } + + return c.JSON(results) +} + +func getByOracleAddress(c *fiber.Ctx) error { + oracleAddress := c.Params("oracleAddress") + payload := new(ReporterSearchModel) + params := GetReporterQueryParams{} + + if err := c.BodyParser(payload); err != nil { + return err + } + + params.OracleAddress = oracleAddress + + if payload.Chain != "" { + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + return err + } + params.ChainId = chain_result.ChainId.String() + } + + if payload.Service != "" { + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + return err + } + params.ServiceId = service_result.ServiceId.String() + } + + results, err := utils.QueryRows[ReporterModel](c, GenerateGetReporterQuery(params), nil) + if err != nil { + return err + } + + for i := range results { + decrypted, err := utils.DecryptText(results[i].PrivateKey) + if err != nil { + return err + } + results[i].PrivateKey = decrypted + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ReporterModel](c, GetReporterById, map[string]any{"id": id}) + if err != nil { + return err + } + + decrypted, err := utils.DecryptText(result.PrivateKey) + if err != nil { + return err + } + result.PrivateKey = decrypted + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ReporterUpdateModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err 
!= nil { + return err + } + + encrypted, err := utils.EncryptText(payload.PrivateKey) + if err != nil { + return err + } + + result, err := utils.QueryRow[ReporterModel](c, UpdateReporterById, map[string]any{ + "id": id, + "address": payload.Address, + "privateKey": encrypted, + "oracleAddress": payload.OracleAddress}) + + if err != nil { + return err + } + + result.PrivateKey = payload.PrivateKey + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ReporterModel](c, DeleteReporterById, map[string]any{"id": id}) + if err != nil { + return err + } + + decrypted, err := utils.DecryptText(result.PrivateKey) + if err != nil { + return err + } + result.PrivateKey = decrypted + + return c.JSON(result) +} diff --git a/node/pkg/api/reporter/queries.go b/node/pkg/api/reporter/queries.go new file mode 100644 index 000000000..4148a1969 --- /dev/null +++ b/node/pkg/api/reporter/queries.go @@ -0,0 +1,65 @@ +package reporter + +import ( + "strings" +) + +type GetReporterQueryParams struct { + ChainId string + ServiceId string + OracleAddress string +} + +const ( + InsertReporter = ` + INSERT INTO reporters (address, "privateKey", "oracleAddress", chain_id, service_id) + VALUES (@address, @privateKey, @oracleAddress, @chain_id, @service_id) + RETURNING reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", + (SELECT name FROM chains WHERE chains.chain_id = reporters.chain_id) AS chain_name, + (SELECT name FROM services WHERE services.service_id = reporters.service_id) AS service_name; + ` + + GetReporterById = ` + SELECT reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", chains.name AS chain_name, services.name AS service_name + FROM reporters + JOIN chains ON reporters.chain_id = chains.chain_id + JOIN services ON reporters.service_id = services.service_id + WHERE reporter_id = @id LIMIT 1;` + + UpdateReporterById = ` + 
UPDATE reporters + SET address = @address, "privateKey" = @privateKey, "oracleAddress" = @oracleAddress + WHERE reporter_id = @id + RETURNING reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", + (SELECT name FROM chains WHERE chains.chain_id = reporters.chain_id) AS chain_name, + (SELECT name FROM services WHERE services.service_id = reporters.service_id) AS service_name; + ` + + DeleteReporterById = `DELETE FROM reporters WHERE reporter_id = @id RETURNING reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", + (SELECT name FROM chains WHERE chains.chain_id = reporters.chain_id) AS chain_name, + (SELECT name FROM services WHERE services.service_id = reporters.service_id) AS service_name;` +) + +func GenerateGetReporterQuery(params GetReporterQueryParams) string { + baseQuery := ` + SELECT reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", chains.name AS chain_name, services.name AS service_name + FROM reporters + JOIN chains ON reporters.chain_id = chains.chain_id + JOIN services ON reporters.service_id = services.service_id + ` + var conditionQueries []string + if params.ChainId != "" { + conditionQueries = append(conditionQueries, "reporters.chain_id = "+params.ChainId) + } + if params.ServiceId != "" { + conditionQueries = append(conditionQueries, "reporters.service_id = "+params.ServiceId) + } + if params.OracleAddress != "" { + conditionQueries = append(conditionQueries, "\"reporters\".\"oracleAddress\" = '"+params.OracleAddress+"'") + } + if len(conditionQueries) == 0 { + return baseQuery + } + joinedString := strings.Join(conditionQueries, " AND ") + return baseQuery + " WHERE " + joinedString +} diff --git a/node/pkg/api/reporter/route.go b/node/pkg/api/reporter/route.go new file mode 100644 index 000000000..961c54db3 --- /dev/null +++ b/node/pkg/api/reporter/route.go @@ -0,0 +1,16 @@ +package reporter + +import ( + 
"github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + reporter := router.Group("/reporter") + + reporter.Post("", insert) + reporter.Get("", get) + reporter.Get("/oracle-address/:oracleAddress", getByOracleAddress) + reporter.Get("/:id", getById) + reporter.Patch("/:id", updateById) + reporter.Delete("/:id", deleteById) +} diff --git a/node/pkg/api/secrets/secrets.go b/node/pkg/api/secrets/secrets.go new file mode 100644 index 000000000..01b839ce3 --- /dev/null +++ b/node/pkg/api/secrets/secrets.go @@ -0,0 +1,77 @@ +package secrets + +import ( + "context" + "os" + + vault "github.com/hashicorp/vault/api" + auth "github.com/hashicorp/vault/api/auth/kubernetes" + "github.com/rs/zerolog/log" +) + +var secretData map[string]interface{} +var initialized bool = false + +func init() { + ctx := context.Background() + + vaultRole := os.Getenv("VAULT_ROLE") + jwtPath := os.Getenv("JWT_PATH") + vaultSecretPath := os.Getenv("VAULT_SECRET_PATH") + vaultKeyName := os.Getenv("VAULT_KEY_NAME") + + if vaultRole == "" || jwtPath == "" || vaultSecretPath == "" || vaultKeyName == "" { + log.Error().Msg("Missing required environment variables for Vault initialization") + return + } + + config := vault.DefaultConfig() + client, err := vault.NewClient(config) + if err != nil { + log.Error().Err(err).Msg("unable to initialize Vault client") + return + } + + k8sAuth, err := auth.NewKubernetesAuth( + vaultRole, + auth.WithServiceAccountTokenPath(jwtPath), + ) + if err != nil { + log.Error().Err(err).Msg("unable to initialize Kubernetes auth method") + return + } + + authInfo, err := client.Auth().Login(ctx, k8sAuth) + if err != nil { + log.Error().Err(err).Msg("unable to log in with Kubernetes auth") + return + } + if authInfo == nil { + log.Error().Err(err).Msg("no auth info was returned after login") + return + } + + secrets, err := client.KVv2(vaultSecretPath).Get(ctx, vaultKeyName) + if err != nil { + log.Error().Err(err).Msg("unable to read secret") + return + } + + 
secretData = secrets.Data + initialized = true +} + +func GetSecret(key string) string { + if !initialized { + return os.Getenv(key) + } + value, ok := secretData[key] + if !ok { + return os.Getenv(key) + } + result, ok := value.(string) + if !ok || result == "" { + return os.Getenv(key) + } + return result +} diff --git a/node/pkg/api/service/controller.go b/node/pkg/api/service/controller.go new file mode 100644 index 000000000..c969784ed --- /dev/null +++ b/node/pkg/api/service/controller.go @@ -0,0 +1,78 @@ +package service + +import ( + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ServiceModel struct { + ServiceId *utils.CustomInt64 `db:"service_id" json:"id"` + Name string `db:"name" json:"name" validate:"required"` +} + +type ServiceInsertModel struct { + Name string `db:"name" json:"name" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ServiceInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ServiceModel](c, InsertService, map[string]any{"name": payload.Name}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[ServiceModel](c, GetService, nil) + if err != nil { + return err + } + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ServiceModel](c, GetServiceById, map[string]any{"id": id}) + if err != nil { + return err + } + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ServiceInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + result, err := utils.QueryRow[ServiceModel](c, UpdateServiceById, map[string]any{"id": id, "name": payload.Name}) + if err != nil 
{ + return err + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ServiceModel](c, DeleteServiceById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} diff --git a/node/pkg/api/service/queries.go b/node/pkg/api/service/queries.go new file mode 100644 index 000000000..ea8e962f1 --- /dev/null +++ b/node/pkg/api/service/queries.go @@ -0,0 +1,27 @@ +package service + +const ( + InsertService = ` + INSERT INTO services (name) VALUES (@name) RETURNING *; + ` + + GetService = ` + SELECT * FROM services; + ` + + GetServiceById = ` + SELECT * FROM services WHERE service_id = @id LIMIT 1; + ` + + GetServiceByName = ` + SELECT * FROM services WHERE name = @name LIMIT 1; + ` + + UpdateServiceById = ` + UPDATE services SET name = @name WHERE service_id = @id RETURNING *; + ` + + DeleteServiceById = ` + DELETE FROM services WHERE service_id = @id RETURNING *; + ` +) diff --git a/node/pkg/api/service/route.go b/node/pkg/api/service/route.go new file mode 100644 index 000000000..7e9fef712 --- /dev/null +++ b/node/pkg/api/service/route.go @@ -0,0 +1,15 @@ +package service + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + service := router.Group("/service") + + service.Post("", insert) + service.Get("", get) + service.Get("/:id", getById) + service.Patch("/:id", updateById) + service.Delete("/:id", deleteById) +} diff --git a/node/pkg/api/tests/apierr_test.go b/node/pkg/api/tests/apierr_test.go new file mode 100644 index 000000000..71684ec76 --- /dev/null +++ b/node/pkg/api/tests/apierr_test.go @@ -0,0 +1,76 @@ +package tests + +import ( + "fmt" + + "testing" + "time" + + "bisonai.com/orakl/node/pkg/api/apierr" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestApiErr(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file 
is not found, continueing without .env file") + } + + now := utils.CustomDateTime{Time: time.Now()} + insertData := apierr.ErrorInsertModel{ + RequestId: "66649924661314489704239946349158829048302840686075232939396730072454733114998", + Timestamp: &now, + Code: "10020", + Name: "MissingKeyInJson", + Stack: `MissingKeyInJson + at wrapper (file:///app/dist/worker/reducer.js:19:23) + at file:///app/dist/utils.js:11:61 + at Array.reduce () + at file:///app/dist/utils.js:11:44 + at processRequest (file:///app/dist/worker/request-response.js:58:34) + at process.processTicksAndRejections (node:internal/process/task_queues:95:5) + at async Worker.wrapper [as processFn] (file:///app/dist/worker/request-response.js:27:25) + at async Worker.processJob (/app/node_modules/bullmq/dist/cjs/classes/worker.js:339:28) + at async Worker.retryIfFailed (/app/node_modules/bullmq/dist/cjs/classes/worker.js:513:24)`, + } + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + app := appConfig.App + + defer pgxClient.Close() + v1 := app.Group("/api/v1") + apierr.Routes(v1) + + // read all before insertion + readAllResultBefore, err := utils.GetRequest[[]apierr.ErrorModel](app, "/api/v1/error", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[apierr.ErrorModel](app, "/api/v1/error", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]apierr.ErrorModel](app, "/api/v1/error", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[apierr.ErrorModel](app, "/api/v1/error/"+insertResult.ERROR_ID.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted service") + + // delete + deleteResult, err := utils.DeleteRequest[apierr.ErrorModel](app, 
"/api/v1/error/"+insertResult.ERROR_ID.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, deleteResult, "should be deleted") + + readAllResultAfterDeletion, err := utils.GetRequest[[]apierr.ErrorModel](app, "/api/v1/error", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/node/pkg/api/tests/chain_test.go b/node/pkg/api/tests/chain_test.go new file mode 100644 index 000000000..6ec044997 --- /dev/null +++ b/node/pkg/api/tests/chain_test.go @@ -0,0 +1,68 @@ +package tests + +import ( + "fmt" + "testing" + + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestChain(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertData = chain.ChainInsertModel{Name: "cypress"} + var updateData = chain.ChainInsertModel{Name: "cypress2"} + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + app := appConfig.App + + defer pgxClient.Close() + v1 := app.Group("/api/v1") + chain.Routes(v1) + + // read all before insertion + readAllResultBefore, err := utils.GetRequest[[]chain.ChainModel](app, "/api/v1/chain", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]chain.ChainModel](app, "/api/v1/chain", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[chain.ChainModel](app, "/api/v1/chain/"+insertResult.ChainId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted chain") + + // patch + patchResult, 
err := utils.PatchRequest[chain.ChainModel](app, "/api/v1/chain/"+insertResult.ChainId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[chain.ChainModel](app, "/api/v1/chain/"+insertResult.ChainId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+insertResult.ChainId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]chain.ChainModel](app, "/api/v1/chain", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/node/pkg/api/tests/listener_test.go b/node/pkg/api/tests/listener_test.go new file mode 100644 index 000000000..f3c9bd732 --- /dev/null +++ b/node/pkg/api/tests/listener_test.go @@ -0,0 +1,97 @@ +package tests + +import ( + "fmt" + "testing" + + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/listener" + "bisonai.com/orakl/node/pkg/api/service" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestListener(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertChain = chain.ChainInsertModel{Name: "listener-test-chain"} + var InsertService = service.ServiceInsertModel{Name: "listener-test-service"} + + var insertData = listener.ListenerInsertModel{ + Address: "0xa", + EventName: "new_round(uint, uint80)", + Chain: "listener-test-chain", + Service: "listener-test-service", + } + + var updateData = listener.ListenerUpdateModel{ + Address: "0x1", + EventName: "new_round_v2(uint, uint80)", + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + app := appConfig.App + + defer 
pgxClient.Close() + v1 := app.Group("/api/v1") + + chain.Routes(v1) + service.Routes(v1) + listener.Routes(v1) + + // insert chain and service before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + serviceInsertResult, err := utils.PostRequest[service.ServiceModel](app, "/api/v1/service", InsertService) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]listener.ListenerModel](app, "/api/v1/listener", map[string]any{"chain": "listener-test-chain", "service": "listener-test-service"}) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + insertResult, err := utils.PostRequest[listener.ListenerModel](app, "/api/v1/listener", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]listener.ListenerModel](app, "/api/v1/listener", map[string]any{"chain": "listener-test-chain", "service": "listener-test-service"}) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted reporter") + + // patch + patchResult, err := utils.PatchRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be 
deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]listener.ListenerModel](app, "/api/v1/listener", map[string]any{"chain": "listener-test-chain", "service": "listener-test-service"}) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + + // delete chain and service (cleanup) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[service.ServiceModel](app, "/api/v1/service/"+serviceInsertResult.ServiceId.String(), nil) + assert.Nil(t, err) +} diff --git a/node/pkg/api/tests/proxy_test.go b/node/pkg/api/tests/proxy_test.go new file mode 100644 index 000000000..a77f7f9c4 --- /dev/null +++ b/node/pkg/api/tests/proxy_test.go @@ -0,0 +1,79 @@ +package tests + +import ( + "fmt" + "testing" + + "bisonai.com/orakl/node/pkg/api/proxy" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestProxy(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + var location = "kr" + var portNumber = utils.CustomInt32(5000) + var insertData = proxy.ProxyInsertModel{ + Protocol: "http", + Host: "127.0.0.1", + Port: &portNumber, + } + + var updateData = proxy.ProxyInsertModel{ + Protocol: "http", + Host: "127.0.0.1", + Port: &portNumber, + Location: &location, + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + app := appConfig.App + + defer pgxClient.Close() + v1 := app.Group("/api/v1") + proxy.Routes(v1) + + // read all before insertion + readAllResultBefore, err := utils.GetRequest[[]proxy.ProxyModel](app, "/api/v1/proxy", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[proxy.ProxyModel](app, "/api/v1/proxy", insertData) + assert.Nil(t, err) + + // read 
all after insertion + readAllResultAfter, err := utils.GetRequest[[]proxy.ProxyModel](app, "/api/v1/proxy", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted proxy") + + // patch + patchResult, err := utils.PatchRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]proxy.ProxyModel](app, "/api/v1/proxy", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/node/pkg/api/tests/reporter_test.go b/node/pkg/api/tests/reporter_test.go new file mode 100644 index 000000000..ec40c1d3d --- /dev/null +++ b/node/pkg/api/tests/reporter_test.go @@ -0,0 +1,99 @@ +package tests + +import ( + "fmt" + "testing" + + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/reporter" + "bisonai.com/orakl/node/pkg/api/service" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestReporter(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertChain = chain.ChainInsertModel{Name: "reporter-test-chain"} + var insertService = 
service.ServiceInsertModel{Name: "reporter-test-service"} + + var insertData = reporter.ReporterInsertModel{ + Address: "0xa", + PrivateKey: "0xb", + OracleAddress: "0xc", + Chain: "reporter-test-chain", + Service: "reporter-test-service", + } + + var updateData = reporter.ReporterUpdateModel{ + Address: "0x1", + PrivateKey: "0x2", + OracleAddress: "0x3", + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + app := appConfig.App + + defer pgxClient.Close() + v1 := app.Group("/api/v1") + + chain.Routes(v1) + service.Routes(v1) + reporter.Routes(v1) + + // insert chain and service before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + serviceInsertResult, err := utils.PostRequest[service.ServiceModel](app, "/api/v1/service", insertService) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]reporter.ReporterModel](app, "/api/v1/reporter", map[string]any{"chain": "reporter-test-chain", "service": "reporter-test-service"}) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + insertResult, err := utils.PostRequest[reporter.ReporterModel](app, "/api/v1/reporter", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]reporter.ReporterModel](app, "/api/v1/reporter", map[string]any{"chain": "reporter-test-chain", "service": "reporter-test-service"}) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should be inserted") + + // patch + patchResult, err := utils.PatchRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), updateData) + assert.Nil(t, 
err) + singleReadResult, err = utils.GetRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]reporter.ReporterModel](app, "/api/v1/reporter", map[string]any{"chain": "reporter-test-chain", "service": "reporter-test-service"}) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + + // delete chain and service (cleanup) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[service.ServiceModel](app, "/api/v1/service/"+serviceInsertResult.ServiceId.String(), nil) + assert.Nil(t, err) +} diff --git a/node/pkg/api/tests/service_test.go b/node/pkg/api/tests/service_test.go new file mode 100644 index 000000000..e7f4231fc --- /dev/null +++ b/node/pkg/api/tests/service_test.go @@ -0,0 +1,68 @@ +package tests + +import ( + "fmt" + "testing" + + "bisonai.com/orakl/node/pkg/api/service" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestService(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertData = service.ServiceInsertModel{Name: "SERVICE_TEST"} + var updateData = service.ServiceInsertModel{Name: "SERVICE_TEST_2"} + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + app := appConfig.App + + defer pgxClient.Close() + v1 := app.Group("/api/v1") + service.Routes(v1) + + // read all before insertion + 
readAllResultBefore, err := utils.GetRequest[[]service.ServiceModel](app, "/api/v1/service", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[service.ServiceModel](app, "/api/v1/service", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]service.ServiceModel](app, "/api/v1/service", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted service") + + // patch + patchResult, err := utils.PatchRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]service.ServiceModel](app, "/api/v1/service", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/node/pkg/api/tests/vrf_test.go b/node/pkg/api/tests/vrf_test.go new file mode 100644 index 000000000..3c4f3c24c --- /dev/null +++ b/node/pkg/api/tests/vrf_test.go @@ -0,0 +1,94 @@ +package tests + +import ( + "fmt" + "testing" + + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/utils" + "bisonai.com/orakl/node/pkg/api/vrf" + + 
"github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestVrf(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertChain = chain.ChainInsertModel{Name: "vrf-test-chain"} + + var insertData = vrf.VrfInsertModel{ + Sk: "ebeb5229570725793797e30a426d7ef8aca79d38ff330d7d1f28485d2366de32", + Pk: "045b8175cfb6e7d479682a50b19241671906f706bd71e30d7e80fd5ff522c41bf0588735865a5faa121c3801b0b0581440bdde24b03dc4c4541df9555d15223e82", + PkX: "41389205596727393921445837404963099032198113370266717620546075917307049417712", + PkY: "40042424443779217635966540867474786311411229770852010943594459290130507251330", + KeyHash: "0x6f32373625e3d1f8f303196cbb78020ac2503acd1129e44b36b425781a9664ac", + Chain: "vrf-test-chain", + } + + var updateData = vrf.VrfUpdateModel{ + Sk: "ebeb5229570725793797e30a426d7ef8aca79d38ff330d7d1f28485d2366de32", + Pk: "045b8175cfb6e7d479682a50b19241671906f706bd71e30d7e80fd5ff522c41bf0588735865a5faa121c3801b0b0581440bdde24b03dc4c4541df9555d15223e82", + PkX: "41389205596727393921445837404963099032198113370266717620546075917307049417712", + PkY: "40042424443779217635966540867474786311411229770852010943594459290130507251330", + KeyHash: "0x", + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + app := appConfig.App + + defer pgxClient.Close() + v1 := app.Group("/api/v1") + vrf.Routes(v1) + chain.Routes(v1) + + // insert chain before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]vrf.VrfModel](app, "/api/v1/vrf", map[string]any{"chain": "vrf-test-chain"}) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + insertResult, err := utils.PostRequest[vrf.VrfModel](app, "/api/v1/vrf", insertData) + assert.Nil(t, err) + + // read all after insertion + 
readAllResultAfter, err := utils.GetRequest[[]vrf.VrfModel](app, "/api/v1/vrf", map[string]any{"chain": "vrf-test-chain"}) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted vrf") + + // patch + patchResult, err := utils.PatchRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]vrf.VrfModel](app, "/api/v1/vrf", map[string]any{"chain": "vrf-test-chain"}) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + + // delete chain (cleanup) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) +} diff --git a/node/pkg/api/utils/custom_types.go b/node/pkg/api/utils/custom_types.go new file mode 100644 index 000000000..eb987b8ff --- /dev/null +++ b/node/pkg/api/utils/custom_types.go @@ -0,0 +1,234 @@ +package utils + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" + "time" +) + +// float8 in postgresql +// json return type: float +type CustomFloat float64 + +// boolean in postgresql +// json return type: boolean +type CustomBool bool + +// int4 in postgresql +// json return type: number +type CustomInt32 int32 + +// int8 and 
// CustomFloat maps postgresql float8; JSON wire type: number.
type CustomFloat float64

// CustomBool maps postgresql boolean; JSON wire type: boolean.
type CustomBool bool

// CustomInt32 maps postgresql int4; JSON wire type: number.
type CustomInt32 int32

// CustomInt64 maps postgresql int8/bigint; JSON wire type: string, so
// full 64-bit values survive JavaScript's 53-bit float precision.
type CustomInt64 int64

// CustomDateTime wraps time.Time with a fixed millisecond RFC 3339
// output format and lenient parsing on input.
type CustomDateTime struct {
	time.Time
}

// RFC3339Milli is RFC 3339 with exactly three fractional-second digits;
// it is both the marshal format and the preferred parse format.
const RFC3339Milli = "2006-01-02T15:04:05.000Z07:00"

// MarshalJSON encodes the value as a plain JSON number.
func (cf *CustomFloat) MarshalJSON() ([]byte, error) {
	// Explicit conversion to float64 avoids re-entering this method.
	return json.Marshal(float64(*cf))
}

// UnmarshalJSON accepts a JSON number or a numeric string.
func (cf *CustomFloat) UnmarshalJSON(data []byte) error {
	var value interface{}
	if err := json.Unmarshal(data, &value); err != nil {
		return err
	}

	switch v := value.(type) {
	case float64:
		// encoding/json decodes every JSON number into float64.
		*cf = CustomFloat(v)
	case string:
		converted, err := strconv.ParseFloat(v, 64)
		if err != nil {
			return err
		}
		*cf = CustomFloat(converted)
	default:
		return fmt.Errorf("unexpected type for CustomFloat: %T", value)
	}
	return nil
}

// MarshalJSON encodes the value as a plain JSON boolean.
func (cb *CustomBool) MarshalJSON() ([]byte, error) {
	return json.Marshal(bool(*cb))
}

// UnmarshalJSON accepts a JSON boolean or a boolean-like string ("true", "1", ...).
func (cb *CustomBool) UnmarshalJSON(data []byte) error {
	var value interface{}
	if err := json.Unmarshal(data, &value); err != nil {
		return err
	}

	switch v := value.(type) {
	case bool:
		*cb = CustomBool(v)
	case string:
		converted, err := strconv.ParseBool(v)
		if err != nil {
			return err
		}
		*cb = CustomBool(converted)
	default:
		return fmt.Errorf("unexpected type for CustomBool: %T", value)
	}
	return nil
}

// MarshalJSON encodes the value as a plain JSON number.
func (ci32 *CustomInt32) MarshalJSON() ([]byte, error) {
	return json.Marshal(int32(*ci32))
}

// UnmarshalJSON accepts a JSON number or a numeric string; an empty
// string decodes to 0.
func (ci32 *CustomInt32) UnmarshalJSON(data []byte) error {
	var value interface{}
	if err := json.Unmarshal(data, &value); err != nil {
		return err
	}

	switch v := value.(type) {
	case float64:
		// encoding/json decodes every JSON number into float64.
		*ci32 = CustomInt32(int32(v))
	case string:
		if v == "" {
			*ci32 = CustomInt32(0)
			return nil
		}
		// bitSize 32 makes out-of-range values an error instead of a
		// silent truncation (the previous Atoi + int32 cast truncated).
		converted, err := strconv.ParseInt(v, 10, 32)
		if err != nil {
			return err
		}
		*ci32 = CustomInt32(converted)
	default:
		return fmt.Errorf("unexpected type for CustomInt32: %T", value)
	}
	return nil
}

// String renders the value in base 10.
func (ci64 CustomInt64) String() string {
	return strconv.FormatInt(int64(ci64), 10)
}

// MarshalJSON encodes the value as a JSON string (see CustomInt64 doc).
func (ci64 *CustomInt64) MarshalJSON() ([]byte, error) {
	return json.Marshal(ci64.String())
}

// UnmarshalJSON accepts a JSON number or a numeric string; an empty
// string decodes to 0.
func (ci64 *CustomInt64) UnmarshalJSON(data []byte) error {
	var value interface{}
	if err := json.Unmarshal(data, &value); err != nil {
		return err
	}

	switch v := value.(type) {
	case float64:
		// encoding/json decodes every JSON number into float64; values
		// above 2^53 sent as bare numbers lose precision, which is why
		// the marshal side emits strings.
		*ci64 = CustomInt64(int64(v))
	case string:
		if v == "" {
			*ci64 = CustomInt64(0)
			return nil
		}
		// ParseInt with bitSize 64 covers the whole int64 range; the
		// previous strconv.Atoi is int-sized and overflows on 32-bit
		// platforms.
		converted, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return err
		}
		*ci64 = CustomInt64(converted)
	default:
		return fmt.Errorf("unexpected type for CustomInt64: %T", value)
	}
	return nil
}

// String renders the timestamp in UTC using RFC3339Milli.
func (cdt CustomDateTime) String() string {
	return cdt.Time.UTC().Format(RFC3339Milli)
}

// MarshalJSON encodes the timestamp as an RFC3339Milli JSON string.
func (cdt *CustomDateTime) MarshalJSON() ([]byte, error) {
	return json.Marshal(cdt.String())
}

// Scan implements sql.Scanner for time.Time and string database values.
func (cdt *CustomDateTime) Scan(src interface{}) error {
	switch v := src.(type) {
	case time.Time:
		cdt.Time = v
	case string:
		v = strings.Replace(v, "GMT", "UTC", -1)
		parsed, err := parseDateTime(v)
		if err != nil {
			return err
		}
		cdt.Time = parsed
	default:
		return fmt.Errorf("unexpected type for CustomDateTime: %T", src)
	}
	return nil
}

// UnmarshalJSON accepts an RFC3339Milli or plain RFC 3339 string.
func (cdt *CustomDateTime) UnmarshalJSON(data []byte) error {
	var value interface{}
	if err := json.Unmarshal(data, &value); err != nil {
		return err
	}

	switch v := value.(type) {
	case string:
		v = strings.Replace(v, "GMT", "UTC", -1)
		parsed, err := parseDateTime(v)
		if err != nil {
			return err
		}
		cdt.Time = parsed
	default:
		return fmt.Errorf("unexpected type for CustomDateTime: %T", value)
	}
	return nil
}

// parseDateTime tries the recommended RFC3339Milli format (which matches
// the marshal output) first, then falls back to plain RFC 3339.
func parseDateTime(v string) (time.Time, error) {
	if converted, err := time.Parse(RFC3339Milli, v); err == nil {
		return converted, nil
	}
	if converted, err := time.Parse(time.RFC3339, v); err == nil {
		return converted, nil
	}
	return time.Time{}, fmt.Errorf("unexpected dateTime format: %s", v)
}
return result, err + } + + return result, nil +} + +func GetRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "GET", endpoint, requestBody) +} + +func PostRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "POST", endpoint, requestBody) +} + +func PatchRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "PATCH", endpoint, requestBody) +} + +func DeleteRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "DELETE", endpoint, requestBody) +} + +func UrlRequest[T any](urlEndpoint string, method string, requestBody interface{}) (T, error) { + var result T + var body io.Reader + + if requestBody != nil { + marshalledData, err := json.Marshal(requestBody) + if err != nil { + fmt.Println("failed to marshal request body") + return result, err + } + body = bytes.NewReader(marshalledData) + } + + url, err := url.Parse(urlEndpoint) + if err != nil { + fmt.Println("Error parsing URL:", err) + return result, err + } + + req, err := http.NewRequest( + method, + url.String(), + body, + ) + + req.Header.Set("Content-Type", "application/json") + if err != nil { + fmt.Println("failed to create request") + return result, err + } + + response, err := http.DefaultClient.Do(req) + if err != nil { + fmt.Println(url.String()) + fmt.Println("Error making POST request:", err) + return result, err + } + resultBody, err := io.ReadAll(response.Body) + if err != nil { + fmt.Println("Error reading response body:", err) + return result, err + } + + err = json.Unmarshal(resultBody, &result) + if err != nil { + fmt.Println("failed Unmarshal result body:" + string(resultBody)) + return result, err + } + + return result, nil +} + +func DeepCopyMap(src map[string]interface{}) (map[string]interface{}, error) { + srcJSON, err := json.Marshal(src) + if err != nil { + return nil, err + } 
+ + dst := make(map[string]interface{}) + + err = json.Unmarshal(srcJSON, &dst) + if err != nil { + return nil, err + } + + return dst, nil +} diff --git a/node/pkg/api/utils/utils.go b/node/pkg/api/utils/utils.go new file mode 100644 index 000000000..34808d141 --- /dev/null +++ b/node/pkg/api/utils/utils.go @@ -0,0 +1,297 @@ +package utils + +import ( + "context" + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "encoding/hex" + "errors" + "fmt" + "log" + "os" + "runtime/debug" + "strconv" + "strings" + + "bisonai.com/orakl/node/pkg/api/secrets" + "golang.org/x/crypto/scrypt" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/recover" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" +) + +type AppConfig struct { + Postgres *pgxpool.Pool + App *fiber.App +} + +func IsTesting(c *fiber.Ctx) bool { + testing, ok := c.Locals("testing").(bool) + if !ok { + // disable test mode if loading testing fails + return false + } else { + return testing + } +} + +func GetPgx(c *fiber.Ctx) (*pgxpool.Pool, error) { + con, ok := c.Locals("pgxConn").(*pgxpool.Pool) + if !ok { + return con, errors.New("failed to get pgxConn") + } else { + return con, nil + } +} + +func RawQueryWithoutReturn(c *fiber.Ctx, query string, args map[string]any) error { + pgxPool, err := GetPgx(c) + if err != nil { + return err + } + + rows, err := pgxPool.Query(c.Context(), query, pgx.NamedArgs(args)) + if err != nil { + return err + } + defer rows.Close() + + return nil +} + +func QueryRow[T any](c *fiber.Ctx, query string, args map[string]any) (T, error) { + var result T + pgxPool, err := GetPgx(c) + if err != nil { + return result, err + } + + rows, err := pgxPool.Query(c.Context(), query, pgx.NamedArgs(args)) + if err != nil { + return result, err + } + + result, err = pgx.CollectOneRow(rows, pgx.RowToStructByName[T]) + if errors.Is(err, pgx.ErrNoRows) { + return result, nil + } + return result, err +} + +func 
QueryRows[T any](c *fiber.Ctx, query string, args map[string]any) ([]T, error) { + results := []T{} + pgxPool, err := GetPgx(c) + if err != nil { + return results, err + } + + rows, err := pgxPool.Query(c.Context(), query, pgx.NamedArgs(args)) + if err != nil { + return results, err + } + + results, err = pgx.CollectRows(rows, pgx.RowToStructByName[T]) + if errors.Is(err, pgx.ErrNoRows) || (results == nil && err == nil) { + return []T{}, nil + } + return results, err +} + +func Setup(options ...string) (AppConfig, error) { + var version string + var appConfig AppConfig + + if len(options) > 0 { + version = options[0] + } else { + version = "test" + } + + config, err := LoadEnvVars() + if err != nil { + return appConfig, err + } + // pgsql connect + pgxPool, pgxError := pgxpool.New(context.Background(), config["DATABASE_URL"].(string)) + if pgxError != nil { + return appConfig, pgxError + } + + testing, err := strconv.ParseBool(config["TEST_MODE"].(string)) + if err != nil { + // defaults to testing false + testing = false + } + + app := fiber.New(fiber.Config{ + AppName: "Orakl API " + version, + EnablePrintRoutes: true, + ErrorHandler: CustomErrorHandler, + }) + app.Use(recover.New( + recover.Config{ + EnableStackTrace: true, + StackTraceHandler: CustomStackTraceHandler, + }, + )) + app.Use(cors.New()) + + app.Use(func(c *fiber.Ctx) error { + c.Locals("pgxConn", pgxPool) + c.Locals("testing", testing) + return c.Next() + }) + + appConfig = AppConfig{ + Postgres: pgxPool, + App: app, + } + return appConfig, nil +} + +func EncryptText(textToEncrypt string) (string, error) { + config, err := LoadEnvVars() + if err != nil { + return "", err + } + password := config["ENCRYPT_PASSWORD"].(string) + // Generate a random 16-byte IV + iv := make([]byte, 16) + if _, err = rand.Read(iv); err != nil { + return "", err + } + + // Derive a 32-byte key using scrypt + key, err := scrypt.Key([]byte(password), []byte("salt"), 16384, 8, 1, 32) + if err != nil { + return "", err + } + 
+ // Create a cipher using AES-256-CTR with the key and IV + block, err := aes.NewCipher(key) + if err != nil { + return "", err + } + stream := cipher.NewCTR(block, iv) + + // Encrypt the text + ciphertext := make([]byte, len(textToEncrypt)) + stream.XORKeyStream(ciphertext, []byte(textToEncrypt)) + + // Combine the IV and ciphertext into a single string + encryptedText := hex.EncodeToString(iv) + hex.EncodeToString(ciphertext) + + return encryptedText, nil +} + +func DecryptText(encryptedText string) (string, error) { + config, err := LoadEnvVars() + if err != nil { + return "", err + } + password := config["ENCRYPT_PASSWORD"].(string) + + // Extract the IV and ciphertext from the string + iv, err := hex.DecodeString(encryptedText[:32]) + if err != nil { + return "", err + } + ciphertext, err := hex.DecodeString(encryptedText[32:]) + if err != nil { + return "", err + } + + // Derive the key using scrypt + key, err := scrypt.Key([]byte(password), []byte("salt"), 16384, 8, 1, 32) + if err != nil { + return "", err + } + + // Create a decipher using AES-256-CTR with the key and IV + block, err := aes.NewCipher(key) + if err != nil { + return "", err + } + stream := cipher.NewCTR(block, iv) + + // Decrypt the ciphertext + decryptedText := make([]byte, len(ciphertext)) + stream.XORKeyStream(decryptedText, ciphertext) + + return string(decryptedText), nil +} + +func LoadEnvVars() (map[string]interface{}, error) { + databaseURL := secrets.GetSecret("DATABASE_URL") + encryptPassword := secrets.GetSecret("ENCRYPT_PASSWORD") + + redisHost := os.Getenv("REDIS_HOST") + redisPort := os.Getenv("REDIS_PORT") + appPort := os.Getenv("APP_PORT") + testMode := os.Getenv("TEST_MODE") + + if databaseURL == "" { + return nil, errors.New("DATABASE_URL is not set") + } + if redisHost == "" { + redisHost = "localhost" + } + if redisPort == "" { + redisPort = "6379" + } + if appPort == "" { + appPort = "3000" + } + if encryptPassword == "" { + return nil, errors.New("ENCRYPT_PASSWORD is 
not set") + } + + return map[string]interface{}{ + "DATABASE_URL": databaseURL, + "REDIS_HOST": redisHost, + "REDIS_PORT": redisPort, + "APP_PORT": appPort, + "TEST_MODE": testMode, + "ENCRYPT_PASSWORD": encryptPassword, + }, nil +} + +func CustomErrorHandler(c *fiber.Ctx, err error) error { + // Status code defaults to 500 + code := fiber.StatusInternalServerError + + // Retrieve the custom status code if it's a *fiber.Error + var e *fiber.Error + if errors.As(err, &e) { + code = e.Code + } + + // Set Content-Type: text/plain; charset=utf-8 + c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8) + + // Return status code with error message + // | ${status} | ${ip} | ${method} | ${path} | ${error}", + log.Printf("| %d | %s | %s | %s | %s\n", code, c.IP(), c.Method(), c.Path(), err.Error()) + return c.Status(code).SendString(err.Error()) +} + +func CustomStackTraceHandler(_ *fiber.Ctx, e interface{}) { + stackTrace := strings.Split(string(debug.Stack()), "\n") + var failPoint string + + for _, line := range stackTrace { + if strings.Contains(line, "controller.go") { + path := strings.Split(strings.TrimSpace(line), " ")[0] + splitted := strings.Split(path, "/") + failPoint = splitted[len(splitted)-2] + "/" + splitted[len(splitted)-1] + + break + } + } + log.Printf("| (%s) panic: %v \n", failPoint, e) + _, _ = os.Stderr.WriteString(fmt.Sprintf("%s\n", debug.Stack())) //nolint:errcheck // This will never fail +} diff --git a/node/pkg/api/vrf/controller.go b/node/pkg/api/vrf/controller.go new file mode 100644 index 000000000..57e612d95 --- /dev/null +++ b/node/pkg/api/vrf/controller.go @@ -0,0 +1,142 @@ +package vrf + +import ( + "bisonai.com/orakl/node/pkg/api/chain" + "bisonai.com/orakl/node/pkg/api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type VrfModel struct { + VrfKeyId *utils.CustomInt64 `db:"vrf_key_id" json:"id"` + Sk string `db:"sk" json:"sk" validate:"required"` + Pk string `db:"pk" json:"pk" 
validate:"required"` + PkX string `db:"pk_x" json:"pkX" validate:"required"` + PkY string `db:"pk_y" json:"pkY" validate:"required"` + KeyHash string `db:"key_hash" json:"keyHash" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +type VrfUpdateModel struct { + Sk string `db:"sk" json:"sk" validate:"required"` + Pk string `db:"pk" json:"pk" validate:"required"` + PkX string `db:"pk_x" json:"pkX" validate:"required"` + PkY string `db:"pk_y" json:"pkY" validate:"required"` + KeyHash string `db:"key_hash" json:"keyHash" validate:"required"` +} + +type VrfInsertModel struct { + Sk string `db:"sk" json:"sk" validate:"required"` + Pk string `db:"pk" json:"pk" validate:"required"` + PkX string `db:"pk_x" json:"pkX" validate:"required"` + PkY string `db:"pk_y" json:"pkY" validate:"required"` + KeyHash string `db:"key_hash" json:"keyHash" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(VrfInsertModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + return err + } + + result, err := utils.QueryRow[VrfModel](c, InsertVrf, map[string]any{ + "sk": payload.Sk, + "pk": payload.Pk, + "pk_x": payload.PkX, + "pk_y": payload.PkY, + "key_hash": payload.KeyHash, + "chain_id": chain_result.ChainId}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + payload := new(struct { + CHAIN string `db:"name" json:"chain"` + }) + + if len(c.Body()) == 0 { + results, err := utils.QueryRows[VrfModel](c, GetVrfWithoutChainId, nil) + if err != nil { + return err + } + return c.JSON(results) + } + + if err := c.BodyParser(payload); err != nil { + return err + } + + 
chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.CHAIN}) + if err != nil { + return err + } + + results, err := utils.QueryRows[VrfModel](c, GetVrf, map[string]any{"chain_id": chain_result.ChainId}) + if err != nil { + return err + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[VrfModel](c, GetVrfById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(VrfUpdateModel) + if err := c.BodyParser(payload); err != nil { + return err + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + return err + } + + result, err := utils.QueryRow[VrfModel](c, UpdateVrfById, map[string]any{ + "id": id, + "sk": payload.Sk, + "pk": payload.Pk, + "pk_x": payload.PkX, + "pk_y": payload.PkY, + "key_hash": payload.KeyHash}) + if err != nil { + return err + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[VrfModel](c, DeleteVrfById, map[string]any{"id": id}) + if err != nil { + return err + } + + return c.JSON(result) +} diff --git a/node/pkg/api/vrf/queries.go b/node/pkg/api/vrf/queries.go new file mode 100644 index 000000000..bc31507d4 --- /dev/null +++ b/node/pkg/api/vrf/queries.go @@ -0,0 +1,43 @@ +package vrf + +const ( + InsertVrf = ` + INSERT INTO vrf_keys (sk, pk, pk_x, pk_y, key_hash, chain_id) + VALUES (@sk, @pk, @pk_x, @pk_y, @key_hash, @chain_id) + RETURNING vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, + (SELECT name FROM chains WHERE chains.chain_id = vrf_keys.chain_id) AS chain_name; + ` + + GetVrf = ` + SELECT vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, chains.name AS chain_name + FROM vrf_keys + JOIN chains 
ON vrf_keys.chain_id = chains.chain_id + WHERE vrf_keys.chain_id = @chain_id; + ` + + GetVrfWithoutChainId = ` + SELECT vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, chains.name AS chain_name + FROM vrf_keys + JOIN chains ON vrf_keys.chain_id = chains.chain_id; + ` + + GetVrfById = ` + SELECT vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, chains.name AS chain_name + FROM vrf_keys + JOIN chains ON vrf_keys.chain_id = chains.chain_id + WHERE vrf_key_id = @id LIMIT 1; + ` + + UpdateVrfById = ` + UPDATE vrf_keys + SET sk = @sk, pk = @pk, pk_x = @pk_x, pk_y = @pk_y, key_hash = @key_hash + WHERE vrf_key_id = @id + RETURNING vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, + (SELECT name FROM chains WHERE chains.chain_id = vrf_keys.chain_id) AS chain_name; + ` + + DeleteVrfById = ` + DELETE FROM vrf_keys WHERE vrf_key_id = @id RETURNING vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, + (SELECT name FROM chains WHERE chains.chain_id = vrf_keys.chain_id) AS chain_name; + ` +) diff --git a/node/pkg/api/vrf/route.go b/node/pkg/api/vrf/route.go new file mode 100644 index 000000000..e00a4b783 --- /dev/null +++ b/node/pkg/api/vrf/route.go @@ -0,0 +1,15 @@ +package vrf + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + vrf := router.Group("/vrf") + + vrf.Post("", insert) + vrf.Get("", get) + vrf.Get("/:id", getById) + vrf.Patch("/:id", updateById) + vrf.Delete("/:id", deleteById) +}