diff --git a/.github/workflows/goapi.image+upload.yaml b/.github/workflows/goapi.image+upload.yaml new file mode 100644 index 000000000..ac997113b --- /dev/null +++ b/.github/workflows/goapi.image+upload.yaml @@ -0,0 +1,97 @@ +name: GO API Deploy to Amazon ECR + +on: + push: + braches: + - master + paths: + - "go-api/**" + workflow_dispatch: + +env: + ecr_url: public.ecr.aws/bisonai/orakl-goapi + +jobs: + prepare: + name: Prepare Build + runs-on: ubuntu-latest + + outputs: + tag_date: ${{ steps.tag.outputs.date }} + tag_git_hash: ${{ steps.tag.outputs.git_hash }} + version: ${{ steps.package.outputs.version }} + + steps: + - uses: actions/checkout@v3 + + - name: Get time TAG + id: tag + run: | + echo "date=$(date +'%Y%m%d.%H%M')" >> $GITHUB_OUTPUT + echo "git_hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Get package version + id: package + run: | + version=$(cat "./go-api/.version") + echo "version=${version}" >> $GITHUB_OUTPUT + + build: + name: Build + runs-on: ubuntu-latest + needs: prepare + + permissions: + id-token: write + contents: read + + outputs: + tag_date: ${{ steps.tag.outputs.date }} + tag_git_hash: ${{ steps.tag.outputs.git_hash }} + + steps: + - uses: actions/checkout@v3 + + - name: Setup Go + uses: actions/setup-go@v4 + with: + go-version: "1.21.5" + check-latest: true + cache-dependency-path: | + ./go-api/go.sum + + - name: Run lint + uses: golangci/golangci-lint-action@v3 + with: + version: v1.54 + working-directory: go-api + skip-pkg-cache: true + skip-build-cache: true + args: --timeout=10m + + - name: Run Vet + run: | + cd ./go-api + go install golang.org/x/tools/go/analysis/passes/shadow/cmd/shadow@latest + go vet + go vet -vettool=$(which shadow) + + - name: Docker build orakl-goapi + run: SERVICE_NAME=orakl-goapi docker-compose -f docker-compose.build.yaml build + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-region: us-east-1 + role-to-assume: ${{ secrets.ROLE_ARN }} 
+ + - name: Login to Amazon ECR + id: login-ecr-public + uses: aws-actions/amazon-ecr-login@v1 + with: + registry-type: public + + - name: Publish Image to ECR(api) + run: | + docker tag orakl-goapi ${{ env.ecr_url }}:v${{ needs.prepare.outputs.version }}.${{ needs.prepare.outputs.tag_date }}.${{ needs.prepare.outputs.tag_git_hash }} + docker push ${{ env.ecr_url }}:v${{ needs.prepare.outputs.version }}.${{ needs.prepare.outputs.tag_date }}.${{ needs.prepare.outputs.tag_git_hash }} diff --git a/.github/workflows/goapi.test.yaml b/.github/workflows/goapi.test.yaml new file mode 100644 index 000000000..a891c4ee2 --- /dev/null +++ b/.github/workflows/goapi.test.yaml @@ -0,0 +1,74 @@ +name: "goapi: test" + +on: + push: + branches-ignore: + - "master" + paths: + - "go-api/**" + workflow_dispatch: + +jobs: + core-build: + runs-on: ubuntu-latest + timeout-minutes: 3 + + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + POSTGRES_DB: orakl-test + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + redis: + image: redis + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + + steps: + - uses: actions/checkout@v3 + - name: Setup Go + uses: actions/setup-go@v4 + with: + go-version: "1.21.5" + check-latest: true + cache-dependency-path: | + ./go-api/go.sum + - name: Install golang-migrate + run: | + curl -L https://github.com/golang-migrate/migrate/releases/download/v4.17.0/migrate.linux-amd64.tar.gz | tar xvz + sudo mv ./migrate /usr/bin + - name: Migrate up + run: | + cd ./go-api + migrate -database "postgresql://postgres:postgres@localhost:5432/orakl-test?search_path=public&sslmode=disable" -verbose -path ./migrations up + - name: Install dependencies + run: | + cd ./go-api + go mod tidy + - name: Build + run: | + cd ./go-api + go build + - name: Run test + run: | + cd ./go-api + go 
test ./tests -v + env: + DATABASE_URL: "postgresql://postgres:postgres@localhost:5432/orakl-test?search_path=public" + ENCRYPT_PASSWORD: "abc123" + REDIS_HOST: "localhost" + REDIS_PORT: "6379" + TEST_MODE: true diff --git a/.gitignore b/.gitignore index dcab9bf5d..bd2b0aea0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ node_modules/ contracts/scripts/**/tmp yarn-error.log -dockerfiles/local-data-feed/tmp/** \ No newline at end of file +.DS_Store +dockerfiles/local-data-feed/tmp/** diff --git a/docker-compose.local-data-feed.yaml b/docker-compose.local-data-feed.yaml index 56bd7eaec..3db396c0c 100644 --- a/docker-compose.local-data-feed.yaml +++ b/docker-compose.local-data-feed.yaml @@ -61,7 +61,7 @@ services: api: build: context: . - dockerfile: dockerfiles/orakl-api.Dockerfile + dockerfile: dockerfiles/orakl-goapi.Dockerfile image: orakl-api expose: - "3000" diff --git a/dockerfiles/local-data-feed/envs/.api.env b/dockerfiles/local-data-feed/envs/.api.env index 86f9ec673..ecb1cda0b 100644 --- a/dockerfiles/local-data-feed/envs/.api.env +++ b/dockerfiles/local-data-feed/envs/.api.env @@ -1,4 +1,4 @@ -DATABASE_URL=postgresql://testuser@postgres:5432/test?schema=public +DATABASE_URL=postgresql://testuser@postgres:5432/test?search_path=public APP_PORT=3000 ENCRYPT_PASSWORD=anything diff --git a/dockerfiles/orakl-goapi.Dockerfile b/dockerfiles/orakl-goapi.Dockerfile new file mode 100644 index 000000000..f3cc57754 --- /dev/null +++ b/dockerfiles/orakl-goapi.Dockerfile @@ -0,0 +1,35 @@ + +FROM golang:1.21.5-bullseye as builder + +RUN apt-get update && apt-get install -y curl + +WORKDIR /app + +COPY go-api go-api + +WORKDIR /app/go-api + +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o apibin -ldflags="-w -s" . 
+ +# debian:bullseye-slim +FROM debian@sha256:4b48997afc712259da850373fdbc60315316ee72213a4e77fc5a66032d790b2a + +RUN apt-get update && apt-get install -y curl + +RUN curl -L https://github.com/golang-migrate/migrate/releases/download/v4.17.0/migrate.linux-amd64.tar.gz | tar xvz && \ + mv ./migrate /usr/bin + +WORKDIR /app + +RUN mkdir /app/migrations + +COPY --from=builder /app/go-api/migrations /app/migrations + +COPY --from=builder /app/go-api/apibin /usr/bin + +CMD sh -c '_DATABASE_URL=$DATABASE_URL; if echo $_DATABASE_URL | grep -q "\?"; then \ + _DATABASE_URL="${_DATABASE_URL}&sslmode=disable"; \ + else \ + _DATABASE_URL="${_DATABASE_URL}?sslmode=disable"; \ + fi && \ + migrate -database "$_DATABASE_URL" -verbose -path ./migrations up && apibin' \ No newline at end of file diff --git a/go-api/.env.example b/go-api/.env.example new file mode 100644 index 000000000..a7d38a343 --- /dev/null +++ b/go-api/.env.example @@ -0,0 +1,6 @@ +APP_PORT= +DATABASE_URL= +REDIS_HOST= +REDIS_PORT= +TEST_MODE= +ENCRYPT_PASSWORD= \ No newline at end of file diff --git a/go-api/.env.local b/go-api/.env.local new file mode 100644 index 000000000..5869e0648 --- /dev/null +++ b/go-api/.env.local @@ -0,0 +1,6 @@ +APP_PORT=3111 +DATABASE_URL=postgresql://${USER}@localhost:5432/orakl?search_path=public +REDIS_HOST=localhost +REDIS_PORT=6379 +TEST_MODE=true +ENCRYPT_PASSWORD=anything diff --git a/go-api/.gitignore b/go-api/.gitignore new file mode 100644 index 000000000..af63df964 --- /dev/null +++ b/go-api/.gitignore @@ -0,0 +1,22 @@ +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# 
vendor/ + +# Go workspace file +go.work +.env \ No newline at end of file diff --git a/go-api/.version b/go-api/.version new file mode 100644 index 000000000..8a9ecc2ea --- /dev/null +++ b/go-api/.version @@ -0,0 +1 @@ +0.0.1 \ No newline at end of file diff --git a/go-api/README.md b/go-api/README.md new file mode 100644 index 000000000..dea57b896 --- /dev/null +++ b/go-api/README.md @@ -0,0 +1,181 @@ +# orakl-api migration + +## To Dos + +- [ ] swagger + +## Structure + +- `main.go` : entrypoint to run api server +- `/utils/utils.go`: package containing utility functions +- `/{service}/route.go`: contains routes each calling its function in controller +- `/{service}/controller.go`: contains model and function referenced from endpoint +- `/{service}/queries.go`: contains query or query generator to call db +- `/tests/{service}_test.go`: contains test for each service + +## Naming convention + +### PascalCase + +- exported(called outside package) function or variable +- struct + +```go +type FeedInsertModel struct {...} +func GenerateGetListenerQuery(params map[string]string) string {...} +const (GetProxy = `SELECT * FROM proxies ORDER BY id asc;`) +``` + +### camelCase + +- function and variables which is used within package + +### CamelCase starting with Capital letter + +- elements inside struct + +```go +type ProxyInsertModel struct { + Protocol int `db:"protocol" json:"protocol" validate:"required"` + Host string `db:"host" json:"host" validate:"required"` + Port int `db:"port" json:"port" validate:"required"` + Location string `db:"location" json:"location"` +} +``` + +### Other rules + +- some model starts with \_(underbar), it means that it's used within controller. 
Otherwise it means that its structure for request payload + +```go +type ReporterInsertModel struct {} // struct taken from request body parameters +type _ReporterInsertModel struct {} // struct used when calling insert query +``` + +## Used libraries + +### Api + +- go-fiber (api framework) +- pgx (postgres client) +- gp-redis (redis client) + +### DB migration tool + +- go-migrate (db migration) + +# How to run + +## Prerequisites + +### Install go + +```bash +brew install go +``` + +### Install db + +- Just as orakl-api, it requires postgres and redis + +```bash +brew install postgresql +brew install redis +``` + +### Set .env + +```bash +cp .env.example .env +``` + +- One thing that is different from orakl-api is when setting postgresql url, `?schema={schema}` should be `?search_path={schema}`. +- If port is not defined, api port will be 3000. Other environment variables are required. +- If `TEST_MODE` is true, some routes which aren't used in production will be accessable. + +## Run + +```bash +go run main.go +``` + +# How to run test + +From root path run following command + +``` +go test ./tests -v +``` + +- `-v` is verbose option + +## Run consistency test + +- Run nodejs api and goapi with same db connection url +- If both applications is up and running, execute following command + +``` +go test ./scripts/ -run TestConsistency +``` + +## Run docker-compose from local environment + +- Change api service's docker image into go-apis +- And if there's `schema={}` in DB connection url in .api.env file update it into keyword `search_path={}` + +# How to use DB migration tool + +- This is meant for future development (ex. 
adding new column or table), don't run it on existing dbs + +## Install golang-migrate + +```bash +brew install golang-migrate +``` + +## Migrate commands + +- Run commands from go-api folder +- Write appropriate db connection url for each usecases +- Be careful on adding `sslmode=disbale`, if it has other option such as `?schema=public` add `&sslmode=disable` else add `?sslmode=disable` + +### `migrate create` + +create empty migration files with a pair of .up file and .down file + +```bash +migrate create -ext sql -dir ./migrations -seq {migration_file_name} +``` + +### `migrate up` + +```bash +migrate -database "postgres://{USER}@localhost:5432/orakl?sslmode=disable" -path ./migrations up +``` + +### `migrate down` + +```bash +migrate -database "postgres://{USER}@localhost:5432/orakl?sslmode=disable" -path ./migrations down +``` + +### `migrate force` + +Reference: https://github.com/golang-migrate/migrate/blob/0815e2d770003b4945a4bf86850fb92ca4b7cc5e/GETTING_STARTED.md#forcing-your-database-version + +- If migration file contained an error, migrate will not let you run other migrations on the same database +- Once you know, you should force your database to a version reflecting its real state + +```bash +migrate -database "postgres://{USER}@localhost:5432/orakl?sslmode=disable" -path ./migrations force ${VERSION} +``` + +## References + +- https://gofiber.io/: go fiber +- https://github.com/jackc/pgx: pgx (postgres driver) +- https://github.com/redis/go-redis: go-redis +- https://github.com/golang/go/issues/27179: golang map doesn't preserve json key order, use json.rawMessage instead +- https://stackoverflow.com/questions/69762108/implementing-ethereum-personal-sign-eip-191-from-go-ethereum-gives-different-s: Keccak256Hash in golang +- https://github.com/golang-migrate/migrate: go-migrate +- https://github.com/golang-migrate/migrate/blob/master/database/postgres/TUTORIAL.md: Postgres migration tutorial diff --git a/go-api/adapter/controller.go 
b/go-api/adapter/controller.go new file mode 100644 index 000000000..ff6f0e32e --- /dev/null +++ b/go-api/adapter/controller.go @@ -0,0 +1,167 @@ +package adapter + +import ( + "encoding/json" + "fmt" + "go-api/feed" + "go-api/utils" + "strconv" + + "github.com/ethereum/go-ethereum/crypto" + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type AdapterHashModel struct { + Name string `db:"name" json:"name"` + Decimals *utils.CustomInt32 `db:"decimals" json:"decimals"` + Feeds []feed.FeedWithoutAdapterIdModel `json:"feeds"` +} + +type AdapterInsertModel struct { + AdapterHash string `db:"adapter_hash" json:"adapterHash"` + Name string `db:"name" json:"name" validate:"required"` + Decimals *utils.CustomInt32 `db:"decimals" json:"decimals" validate:"required"` + Feeds []feed.FeedInsertModel `json:"feeds"` +} + +type AdapterModel struct { + AdapterId *utils.CustomInt64 `db:"adapter_id" json:"id"` + AdapterHash string `db:"adapter_hash" json:"adapterHash"` + Name string `db:"name" json:"name" validate:"required"` + Decimals *utils.CustomInt32 `db:"decimals" json:"decimals" validate:"required"` +} + +type AdapterDetailModel struct { + AdapterModel + Feeds []feed.FeedModel `json:"feeds"` +} + +type AdapterIdModel struct { + AdapterId *utils.CustomInt64 `db:"adapter_id" json:"id"` +} + +type FeedIdModel struct { + FeedId *utils.CustomInt64 `db:"feed_id" json:"id"` +} + +func insert(c *fiber.Ctx) error { + payload := new(AdapterInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + err := computeAdapterHash(payload, true) + if err != nil { + panic(err) + } + + row, err := utils.QueryRow[AdapterIdModel](c, InsertAdapter, map[string]any{ + "adapter_hash": payload.AdapterHash, + "name": payload.Name, + "decimals": payload.Decimals}) + if err != nil { + panic(err) + } + + for _, item := range payload.Feeds { + 
item.AdapterId = row.AdapterId + _, err := utils.QueryRow[FeedIdModel](c, InsertFeed, map[string]any{ + "name": item.Name, + "definition": item.Definition, + "adapter_id": item.AdapterId}) + if err != nil { + panic(err) + } + } + + result := AdapterModel{AdapterId: row.AdapterId, AdapterHash: payload.AdapterHash, Name: payload.Name, Decimals: payload.Decimals} + + return c.JSON(result) +} + +func hash(c *fiber.Ctx) error { + verifyRaw := c.Query("verify") + verify, err := strconv.ParseBool(verifyRaw) + if err != nil { + panic(err) + } + + var payload AdapterInsertModel + + if err := c.BodyParser(&payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + err = computeAdapterHash(&payload, verify) + if err != nil { + panic(err) + } + return c.JSON(payload) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[AdapterModel](c, GetAdapter, nil) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[AdapterModel](c, GetAdpaterById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + + result, err := utils.QueryRow[AdapterModel](c, RemoveAdapter, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func computeAdapterHash(data *AdapterInsertModel, verify bool) error { + adapterIdRemovedFeeds := make([]feed.FeedWithoutAdapterIdModel, len(data.Feeds)) + for idx, item := range data.Feeds { + adapterIdRemovedFeeds[idx] = feed.FeedWithoutAdapterIdModel{ + Name: item.Name, + Definition: item.Definition, + } + } + + input := AdapterHashModel{data.Name, data.Decimals, adapterIdRemovedFeeds} + + out, err := json.Marshal(input) + if err != nil { + panic(err) + } + + hash := crypto.Keccak256Hash([]byte(out)) + hashString := 
fmt.Sprintf("0x%x", hash) + if verify && data.AdapterHash != hashString { + return fmt.Errorf("hashes do not match!\nexpected %s, received %s", hashString, data.AdapterHash) + } + + data.AdapterHash = hashString + return nil +} diff --git a/go-api/adapter/queries.go b/go-api/adapter/queries.go new file mode 100644 index 000000000..7cbbeda84 --- /dev/null +++ b/go-api/adapter/queries.go @@ -0,0 +1,19 @@ +package adapter + +const ( + InsertAdapter = ` + INSERT INTO adapters (adapter_hash, name, decimals) VALUES (@adapter_hash, @name, @decimals) RETURNING adapter_id; + ` + + InsertFeed = ` + INSERT INTO feeds (name, definition, adapter_id) VALUES (@name, @definition, @adapter_id) RETURNING feed_id; + ` + + GetAdapter = `SELECT * FROM adapters;` + + GetAdpaterById = `SELECT * FROM adapters WHERE adapter_id = @id;` + + GetAdapterByHash = `SELECT * FROM adapters WHERE adapter_hash = @adapter_hash` + + RemoveAdapter = `DELETE FROM adapters WHERE adapter_id = @id RETURNING *;` +) diff --git a/go-api/adapter/route.go b/go-api/adapter/route.go new file mode 100644 index 000000000..b6bcd6cad --- /dev/null +++ b/go-api/adapter/route.go @@ -0,0 +1,15 @@ +package adapter + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + adapter := router.Group("/adapter") + + adapter.Post("", insert) + adapter.Post("/hash", hash) + adapter.Get("", get) + adapter.Get("/:id", getById) + adapter.Delete("/:id", deleteById) +} diff --git a/go-api/aggregate/controller.go b/go-api/aggregate/controller.go new file mode 100644 index 000000000..8cf1172dc --- /dev/null +++ b/go-api/aggregate/controller.go @@ -0,0 +1,156 @@ +package aggregate + +import ( + "encoding/json" + "go-api/utils" + "time" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type AggregateRedisValueModel struct { + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Value *utils.CustomInt64 `db:"value" json:"value" 
validate:"required"` +} + +type WrappedInsertModel struct { + Data AggregateInsertModel `json:"data"` +} + +type AggregateInsertModel struct { + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Value *utils.CustomInt64 `db:"value" json:"value" validate:"required"` + AggregatorId *utils.CustomInt64 `db:"aggregator_id" json:"aggregatorId" validate:"required"` +} + +type AggregateModel struct { + AggregateId *utils.CustomInt64 `db:"aggregate_id" json:"id"` + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Value *utils.CustomInt64 `db:"value" json:"value" validate:"required"` + AggregatorId *utils.CustomInt64 `db:"aggregator_id" json:"aggregatorId" validate:"required"` +} + +type AggregateIdModel struct { + AggregateId *utils.CustomInt64 `db:"aggregate_id" json:"id"` +} + +func insert(c *fiber.Ctx) error { + _payload := new(WrappedInsertModel) + if err := c.BodyParser(_payload); err != nil { + panic(err) + } + payload := _payload.Data + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + row, err := utils.QueryRow[AggregateIdModel](c, InsertAggregate, map[string]any{ + "timestamp": payload.Timestamp.String(), + "value": payload.Value, + "aggregator_id": payload.AggregatorId}) + if err != nil { + panic(err) + } + + key := "latestAggregate:" + payload.AggregatorId.String() + value, err := json.Marshal(AggregateRedisValueModel{Timestamp: payload.Timestamp, Value: payload.Value}) + if err != nil { + panic(err) + } + + err = utils.SetRedis(c, key, string(value)) + if err != nil { + panic(err) + } + + result := AggregateModel{AggregateId: row.AggregateId, Timestamp: payload.Timestamp, Value: payload.Value, AggregatorId: payload.AggregatorId} + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[AggregateModel](c, GetAggregate, nil) + if err != nil { + panic(err) + } + return c.JSON(results) +} + +func 
getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[AggregateModel](c, GetAggregateById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func getLatestByHash(c *fiber.Ctx) error { + hash := c.Params("hash") + result, err := utils.QueryRow[AggregateModel](c, GetLatestAggregateByHash, map[string]any{"aggregator_hash": hash}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func getLatestById(c *fiber.Ctx) error { + var result AggregateRedisValueModel + returnVal := utils.CustomInt64(0) + + result.Value = &returnVal + result.Timestamp = &utils.CustomDateTime{Time: time.Now()} + + id := c.Params("id") + key := "latestAggregate:" + id + rawResult, err := utils.GetRedis(c, key) + + if err != nil { + pgsqlResult, err := utils.QueryRow[AggregateModel](c, GetLatestAggregateById, map[string]any{"aggregator_id": id}) + if err != nil { + panic(err) + } + return c.JSON(pgsqlResult) + } + + err = json.Unmarshal([]byte(rawResult), &result) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(AggregateInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[AggregateModel](c, UpdateAggregateById, map[string]any{"timestamp": payload.Timestamp.String(), "value": payload.Value, "aggregator_id": payload.AggregatorId, "id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[AggregateModel](c, DeleteAggregateById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/aggregate/queries.go b/go-api/aggregate/queries.go new file mode 100644 index 000000000..0aa68c281 --- /dev/null +++ 
b/go-api/aggregate/queries.go @@ -0,0 +1,43 @@ +package aggregate + +const ( + InsertAggregate = ` + INSERT INTO aggregates (timestamp, value, aggregator_id) VALUES (@timestamp::timestamptz, @value, @aggregator_id) RETURNING aggregate_id; + ` + + GetAggregate = ` + SELECT * FROM aggregates; + ` + + GetAggregateById = ` + SELECT * + FROM aggregates + WHERE aggregate_id = @id + LIMIT 1; + ` + + GetLatestAggregateByHash = ` + SELECT * + FROM aggregates + WHERE aggregator_id = (SELECT aggregator_id FROM aggregators WHERE aggregator_hash = @aggregator_hash) + ORDER BY timestamp DESC + LIMIT 1; + ` + + GetLatestAggregateById = ` + SELECT * + FROM aggregates + WHERE aggregator_id = @aggregator_id + ORDER BY timestamp DESC + LIMIT 1; + ` + + UpdateAggregateById = ` + UPDATE aggregates + SET aggregator_id = @aggregator_id, timestamp = @timestamp::timestamptz, value = @value + WHERE aggregate_id = @id + RETURNING *; + ` + + DeleteAggregateById = `DELETE FROM aggregates WHERE aggregate_id = @id RETURNING *;` +) diff --git a/go-api/aggregate/route.go b/go-api/aggregate/route.go new file mode 100644 index 000000000..1f0de8666 --- /dev/null +++ b/go-api/aggregate/route.go @@ -0,0 +1,17 @@ +package aggregate + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + aggregate := router.Group("/aggregate") + + aggregate.Post("", insert) + aggregate.Get("", get) + aggregate.Get("/:id", getById) + aggregate.Get("/hash/:hash/latest", getLatestByHash) + aggregate.Get("/id/:id/latest", getLatestById) + aggregate.Patch("/:id", updateById) + aggregate.Delete("/:id", deleteById) +} diff --git a/go-api/aggregator/controller.go b/go-api/aggregator/controller.go new file mode 100644 index 000000000..3ca31bace --- /dev/null +++ b/go-api/aggregator/controller.go @@ -0,0 +1,327 @@ +package aggregator + +import ( + "encoding/json" + "fmt" + "go-api/adapter" + "go-api/chain" + "go-api/feed" + "go-api/utils" + "strconv" + + "github.com/ethereum/go-ethereum/crypto" + 
"github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type WrappedUpdateModel struct { + Data AggregatorUpdateModel `json:"data"` +} + +type AggregatorUpdateModel struct { + Active *utils.CustomBool `db:"active" json:"active"` + Chain string `db:"chain" json:"chain" validate:"required"` +} + +type AggregatorInsertModel struct { + AggregatorHash string `db:"aggregator_hash" json:"aggregatorHash"` + Active *utils.CustomBool `db:"active" json:"active"` + Name string `db:"name" json:"name" validate:"required"` + Address string `db:"address" json:"address" validate:"required"` + Heartbeat *utils.CustomInt32 `db:"heartbeat" json:"heartbeat" validate:"required"` + Threshold *utils.CustomFloat `db:"threshold" json:"threshold" validate:"required"` + AbsoluteThreshold *utils.CustomFloat `db:"absolute_threshold" json:"absoluteThreshold" validate:"required"` + AdapterHash string `db:"adapter_hash" json:"adapterHash" validate:"required"` + Chain string `db:"chain" json:"chain" validate:"required"` + FetcherType *utils.CustomInt32 `db:"fetcher_type" json:"fetcherType"` +} + +type AggregatorResultModel struct { + AggregatorId *utils.CustomInt64 `db:"aggregator_id" json:"id"` + AggregatorHash string `db:"aggregator_hash" json:"aggregatorHash"` + Active *utils.CustomBool `db:"active" json:"active"` + Name string `db:"name" json:"name"` + Address string `db:"address" json:"address"` + Heartbeat *utils.CustomInt32 `db:"heartbeat" json:"heartbeat"` + Threshold *utils.CustomFloat `db:"threshold" json:"threshold"` + AbsoluteThreshold *utils.CustomFloat `db:"absolute_threshold" json:"absoluteThreshold"` + AdapterId *utils.CustomInt64 `db:"adapter_id" json:"adapterId"` + ChainId *utils.CustomInt64 `db:"chain_id" json:"chainId"` + FetcherType *utils.CustomInt32 `db:"fetcher_type" json:"fetcherType"` +} + +type AggregatorDetailResultModel struct { + AggregatorResultModel + Adapter adapter.AdapterDetailModel `json:"adapter"` +} + +type _AggregatorInsertModel struct { 
+ AggregatorHash string `db:"aggregator_hash" json:"aggregatorHash"` + Active *utils.CustomBool `db:"active" json:"active"` + Name string `db:"name" json:"name"` + Address string `db:"address" json:"address"` + Heartbeat *utils.CustomInt32 `db:"heartbeat" json:"heartbeat"` + Threshold *utils.CustomFloat `db:"threshold" json:"threshold"` + AbsoluteThreshold *utils.CustomFloat `db:"absolute_threshold" json:"absoluteThreshold"` + AdapterId *utils.CustomInt64 `db:"adapter_id" json:"adapterId"` + ChainId *utils.CustomInt64 `db:"chain_id" json:"chainId"` + FetcherType *utils.CustomInt32 `db:"fetcher_type" json:"fetcherType"` +} + +type AggregatorHashComputeProcessModel struct { + Name string `db:"name" json:"name"` + Heartbeat *utils.CustomInt32 `db:"heartbeat" json:"heartbeat"` + Threshold *utils.CustomFloat `db:"threshold" json:"threshold"` + AbsoluteThreshold *utils.CustomFloat `db:"absolute_threshold" json:"absoluteThreshold"` + AdapterHash string `db:"adapter_hash" json:"adapterHash"` +} + +type AggregatorHashComputeInputModel struct { + AggregatorHash string `db:"aggregator_hash" json:"aggregatorHash"` + AggregatorHashComputeProcessModel +} + +type AggregatorIdModel struct { + AggregatorId *utils.CustomInt64 `db:"aggregator_id" json:"id"` +} + +func insert(c *fiber.Ctx) error { + payload := new(AggregatorInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + + adapter_result, err := utils.QueryRow[adapter.AdapterModel](c, adapter.GetAdapterByHash, map[string]any{"adapter_hash": payload.AdapterHash}) + if err != nil { + panic(err) + } + + hashComputeParam := AggregatorHashComputeInputModel{ + AggregatorHash: payload.AggregatorHash, + AggregatorHashComputeProcessModel: 
AggregatorHashComputeProcessModel{ + Name: payload.Name, + Heartbeat: payload.Heartbeat, + Threshold: payload.Threshold, + AbsoluteThreshold: payload.AbsoluteThreshold, + AdapterHash: payload.AdapterHash, + }, + } + err = computeAggregatorHash(&hashComputeParam, true) + if err != nil { + panic(err) + } + + insertParam := _AggregatorInsertModel{ + AggregatorHash: payload.AggregatorHash, + Active: payload.Active, + Name: payload.Name, + Address: payload.Address, + Heartbeat: payload.Heartbeat, + Threshold: payload.Threshold, + AbsoluteThreshold: payload.AbsoluteThreshold, + AdapterId: adapter_result.AdapterId, + ChainId: chain_result.ChainId, + FetcherType: payload.FetcherType, + } + + if insertParam.Active == nil { + insertBool := utils.CustomBool(false) + insertParam.Active = &insertBool + } + + if insertParam.FetcherType == nil { + insertFetcherType := utils.CustomInt32(0) + insertParam.FetcherType = &insertFetcherType + } + + row, err := utils.QueryRow[AggregatorIdModel](c, InsertAggregator, map[string]any{ + "aggregator_hash": insertParam.AggregatorHash, + "active": insertParam.Active, + "name": insertParam.Name, + "address": insertParam.Address, + "heartbeat": insertParam.Heartbeat, + "threshold": insertParam.Threshold, + "absolute_threshold": insertParam.AbsoluteThreshold, + "adapter_id": insertParam.AdapterId, + "chain_id": insertParam.ChainId, + "fetcher_type": insertParam.FetcherType, + }) + if err != nil { + panic(err) + } + + result := AggregatorResultModel{ + AggregatorId: row.AggregatorId, + AggregatorHash: insertParam.AggregatorHash, + Active: insertParam.Active, + Name: insertParam.Name, + Address: insertParam.Address, + Heartbeat: insertParam.Heartbeat, + Threshold: insertParam.Threshold, + AbsoluteThreshold: insertParam.AbsoluteThreshold, + AdapterId: insertParam.AdapterId, + ChainId: insertParam.ChainId, + FetcherType: insertParam.FetcherType, + } + + return c.JSON(result) +} + +func hash(c *fiber.Ctx) error { + verifyRaw := c.Query("verify") + 
verify, err := strconv.ParseBool(verifyRaw) + if err != nil { + panic(err) + } + + payload := new(AggregatorInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + hashComputeParam := AggregatorHashComputeInputModel{ + AggregatorHash: payload.AggregatorHash, + AggregatorHashComputeProcessModel: AggregatorHashComputeProcessModel{ + Name: payload.Name, + Heartbeat: payload.Heartbeat, + Threshold: payload.Threshold, + AbsoluteThreshold: payload.AbsoluteThreshold, + AdapterHash: payload.AdapterHash, + }, + } + + err = computeAggregatorHash(&hashComputeParam, verify) + if err != nil { + panic(err) + } + + return c.JSON(hashComputeParam) +} + +func get(c *fiber.Ctx) error { + queries := c.Queries() + queryParam := GetAggregatorQueryParams{ + Active: queries["active"], + Chain: queries["chain"], + Address: queries["address"], + } + queryString, err := GenerateGetAggregatorQuery(queryParam) + if err != nil { + panic(err) + } + + results, err := utils.QueryRows[AggregatorResultModel](c, queryString, nil) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func getByHashAndChain(c *fiber.Ctx) error { + var result = new(AggregatorDetailResultModel) + hash := c.Params("hash") + _chain := c.Params("chain") + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": _chain}) + if err != nil { + panic(err) + } + + result.AggregatorResultModel, err = utils.QueryRow[AggregatorResultModel](c, GetAggregatorByChainAndHash, map[string]any{ + "aggregator_hash": hash, + "chain_id": chain_result.ChainId, + }) + if err != nil { + panic(err) + } + + result.Adapter.AdapterModel, err = utils.QueryRow[adapter.AdapterModel](c, adapter.GetAdpaterById, map[string]any{"id": result.AggregatorResultModel.AdapterId}) + if err != nil { + panic(err) + } + + result.Adapter.Feeds, err = 
utils.QueryRows[feed.FeedModel](c, feed.GetFeedsByAdapterId, map[string]any{"id": result.AggregatorResultModel.AdapterId}) + if err != nil { + panic(err) + } + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[AggregatorResultModel](c, RemoveAggregator, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func updateByHash(c *fiber.Ctx) error { + hash := c.Params("hash") + _payload := new(WrappedUpdateModel) + if err := c.BodyParser(_payload); err != nil { + panic(err) + } + + payload := _payload.Data + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + if payload.Active == nil { + insertBool := utils.CustomBool(false) + payload.Active = &insertBool + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + + result, err := utils.QueryRow[AggregatorResultModel](c, UpdateAggregatorByHash, map[string]any{ + "active": payload.Active, + "hash": hash, + "chain_id": chain_result.ChainId}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func computeAggregatorHash(data *AggregatorHashComputeInputModel, verify bool) error { + input := data + processData := input.AggregatorHashComputeProcessModel + out, err := json.Marshal(processData) + if err != nil { + panic(err) + } + + hash := crypto.Keccak256Hash([]byte(out)) + hashString := fmt.Sprintf("0x%x", hash) + if verify && data.AggregatorHash != hashString { + return fmt.Errorf("aggregator hash mismatch: expected %s, computed %s", data.AggregatorHash, hashString) + } + + data.AggregatorHash = hashString + return nil +} diff --git a/go-api/aggregator/queries.go b/go-api/aggregator/queries.go new file mode 100644 index 000000000..c1cf25721 --- /dev/null +++ b/go-api/aggregator/queries.go @@ -0,0 +1,77 @@ +package aggregator + +import ( + "strconv" + "strings" +) + +type GetAggregatorQueryParams struct { + Active string + Chain string + 
Address string +} + +const ( + InsertAggregator = ` + INSERT INTO aggregators ( + aggregator_hash, + active, + name, + address, + heartbeat, + threshold, + absolute_threshold, + adapter_id, + chain_id, + fetcher_type + ) VALUES ( + @aggregator_hash, + @active, + @name, + @address, + @heartbeat, + @threshold, + @absolute_threshold, + @adapter_id, + @chain_id, + @fetcher_type + ) + RETURNING aggregator_id; + ` + + GetAggregatorByChainAndHash = ` + SELECT * + FROM aggregators + WHERE + aggregator_hash = @aggregator_hash AND + chain_id = @chain_id + LIMIT 1; + ` + + RemoveAggregator = `DELETE FROM aggregators WHERE aggregator_id = @id RETURNING *;` + UpdateAggregator = `UPDATE aggregators SET active = @active WHERE aggregator_id = @id AND chain_id = @chain_id RETURNING *;` + UpdateAggregatorByHash = `UPDATE aggregators SET active = @active WHERE aggregator_hash = @hash AND chain_id = @chain_id RETURNING *;` +) + +func GenerateGetAggregatorQuery(params GetAggregatorQueryParams) (string, error) { + baseQuery := `SELECT * FROM aggregators` + var conditionQueries []string + if params.Active != "" { + _, err := strconv.ParseBool(params.Active) + if err != nil { + return "", err + } + conditionQueries = append(conditionQueries, "active = "+params.Active) + } + if params.Chain != "" { + conditionQueries = append(conditionQueries, "chain_id = (SELECT chain_id FROM chains WHERE name = '"+strings.ReplaceAll(params.Chain, "'", "''")+"')") + } + if params.Address != "" { + conditionQueries = append(conditionQueries, "address = '"+strings.ReplaceAll(params.Address, "'", "''")+"'") + } + if len(conditionQueries) == 0 { + return baseQuery, nil + } + joinedString := strings.Join(conditionQueries, " AND ") + return baseQuery + " WHERE " + joinedString, nil +} diff --git a/go-api/aggregator/route.go b/go-api/aggregator/route.go new file mode 100644 index 000000000..9e397b25e --- /dev/null +++ b/go-api/aggregator/route.go @@ -0,0 +1,16 @@ +package aggregator + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + 
aggregator := router.Group("/aggregator") + + aggregator.Post("", insert) + aggregator.Post("/hash", hash) + aggregator.Get("", get) + aggregator.Get("/:hash/:chain", getByHashAndChain) + aggregator.Delete("/:id", deleteById) + aggregator.Patch("/:hash", updateByHash) +} diff --git a/go-api/apierr/controller.go b/go-api/apierr/controller.go new file mode 100644 index 000000000..de9e97a60 --- /dev/null +++ b/go-api/apierr/controller.go @@ -0,0 +1,82 @@ +package apierr + +import ( + "fmt" + "go-api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ErrorInsertModel struct { + RequestId string `db:"request_id" json:"requestId" validate:"required"` + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Code string `db:"code" json:"code" validate:"required"` + Name string `db:"name" json:"name" validate:"required"` + Stack string `db:"stack" json:"stack" validate:"required"` +} + +type ErrorModel struct { + ERROR_ID *utils.CustomInt64 `db:"error_id" json:"id"` + RequestId string `db:"request_id" json:"requestId" validate:"required"` + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Code string `db:"code" json:"code" validate:"required"` + Name string `db:"name" json:"name" validate:"required"` + Stack string `db:"stack" json:"stack" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ErrorInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ErrorModel](c, InsertError, map[string]any{ + "request_id": payload.RequestId, + "timestamp": payload.Timestamp.String(), + "code": payload.Code, + "name": payload.Name, + "stack": payload.Stack}) + + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := 
utils.QueryRows[ErrorModel](c, GetError, nil) + if err != nil { + panic(err) + } + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ErrorModel](c, GetErrorById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + if !utils.IsTesting(c) { + panic(fmt.Errorf("not allowed")) + } + id := c.Params("id") + result, err := utils.QueryRow[ErrorModel](c, RemoveErrorById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/apierr/queries.go b/go-api/apierr/queries.go new file mode 100644 index 000000000..02da65e07 --- /dev/null +++ b/go-api/apierr/queries.go @@ -0,0 +1,11 @@ +package apierr + +const ( + InsertError = `INSERT INTO error (request_id, timestamp, code, name, stack) VALUES (@request_id, @timestamp::timestamptz, @code, @name, @stack) RETURNING *` + + GetError = `SELECT * FROM error;` + + GetErrorById = `SELECT * FROM error WHERE error_id = @id` + + RemoveErrorById = `DELETE FROM error WHERE error_id = @id RETURNING *;` +) diff --git a/go-api/apierr/route.go b/go-api/apierr/route.go new file mode 100644 index 000000000..94f216f94 --- /dev/null +++ b/go-api/apierr/route.go @@ -0,0 +1,14 @@ +package apierr + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + apierr := router.Group("/error") + + apierr.Post("", insert) + apierr.Get("", get) + apierr.Get("/:id", getById) + apierr.Delete("/:id", deleteById) +} diff --git a/go-api/chain/controller.go b/go-api/chain/controller.go new file mode 100644 index 000000000..29455b8f4 --- /dev/null +++ b/go-api/chain/controller.go @@ -0,0 +1,83 @@ +package chain + +import ( + "go-api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ChainInsertModel struct { + Name string `db:"name" json:"name" validate:"required"` +} + +type ChainModel struct { + 
ChainId *utils.CustomInt64 `db:"chain_id" json:"id"` + Name string `db:"name" json:"name" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ChainInsertModel) + + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ChainModel](c, InsertChain, map[string]any{"name": payload.Name}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[ChainModel](c, GetChain, nil) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ChainModel](c, GetChainByID, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func patchById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ChainInsertModel) + + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ChainModel](c, UpdateChain, map[string]any{"name": payload.Name, "id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + + result, err := utils.QueryRow[ChainModel](c, RemoveChain, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/chain/queries.go b/go-api/chain/queries.go new file mode 100644 index 000000000..ac5678732 --- /dev/null +++ b/go-api/chain/queries.go @@ -0,0 +1,15 @@ +package chain + +const ( + GetChain = `SELECT * FROM chains;` + + GetChainByID = `SELECT * FROM chains WHERE chain_id = @id;` + + GetChainByName = `SELECT * FROM chains WHERE name = @name;` + + InsertChain = `INSERT INTO chains (name) VALUES (@name) RETURNING *;` + + UpdateChain = `UPDATE chains SET name = @name WHERE chain_id = @id RETURNING *;` + + RemoveChain = `DELETE FROM 
chains WHERE chain_id = @id RETURNING *;` +) diff --git a/go-api/chain/route.go b/go-api/chain/route.go new file mode 100644 index 000000000..c4f2fc5cd --- /dev/null +++ b/go-api/chain/route.go @@ -0,0 +1,15 @@ +package chain + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + chain := router.Group("/chain") + + chain.Get("", get) + chain.Get("/:id", getById) + chain.Post("", insert) + chain.Patch("/:id", patchById) + chain.Delete("/:id", deleteById) +} diff --git a/go-api/data/controller.go b/go-api/data/controller.go new file mode 100644 index 000000000..e5bdc72fb --- /dev/null +++ b/go-api/data/controller.go @@ -0,0 +1,97 @@ +package data + +import ( + "fmt" + "go-api/utils" + + "github.com/gofiber/fiber/v2" +) + +type BulkInsertModel struct { + Data []DataInsertModel `json:"data"` +} + +type DataInsertModel struct { + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Value *utils.CustomInt64 `db:"value" json:"value" validate:"required"` + AggregatorId *utils.CustomInt64 `db:"aggregator_id" json:"aggregatorId" validate:"required"` + FeedId *utils.CustomInt64 `db:"feed_id" json:"feedId" validate:"required"` +} + +type DataResultModel struct { + DataId *utils.CustomInt64 `db:"data_id" json:"id"` + Timestamp *utils.CustomDateTime `db:"timestamp" json:"timestamp" validate:"required"` + Value *utils.CustomInt64 `db:"value" json:"value" validate:"required"` + AggregatorId *utils.CustomInt64 `db:"aggregator_id" json:"aggregatorId" validate:"required"` + FeedId *utils.CustomInt64 `db:"feed_id" json:"feedId" validate:"required"` +} + +type BulkInsertResultModel struct { + Count int `json:"count"` +} + +func bulkInsert(c *fiber.Ctx) error { + payload := new(BulkInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + query, err := GenerateBulkInsertQuery(payload.Data) + if err != nil { + panic(err) + } + err = utils.RawQueryWithoutReturn(c, query, nil) + + if err != nil { + 
panic(err) + } + + countResult := BulkInsertResultModel{Count: len(payload.Data)} + + return c.JSON(countResult) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[DataResultModel](c, GetData, nil) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[DataResultModel](c, GetDataById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func getByFeedId(c *fiber.Ctx) error { + if !utils.IsTesting(c) { + panic(fmt.Errorf("not allowed")) + } + id := c.Params("id") + results, err := utils.QueryRows[DataResultModel](c, GetDataByFeedId, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func deleteById(c *fiber.Ctx) error { + if !utils.IsTesting(c) { + panic(fmt.Errorf("not allowed")) + } + id := c.Params("id") + result, err := utils.QueryRow[DataResultModel](c, DeleteDataById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/data/queries.go b/go-api/data/queries.go new file mode 100644 index 000000000..c300f4022 --- /dev/null +++ b/go-api/data/queries.go @@ -0,0 +1,36 @@ +package data + +import ( + "fmt" + "strings" + + "github.com/go-playground/validator/v10" +) + +const ( + GetData = `SELECT * FROM data;` + + GetDataById = `SELECT * FROM data WHERE data_id = @id;` + + DeleteDataById = `DELETE FROM data WHERE data_id = @id RETURNING *;` + + GetDataByFeedId = `SELECT * FROM data WHERE feed_id = @id;` +) + +func GenerateBulkInsertQuery(bulkInsertData []DataInsertModel) (string, error) { + baseQuery := `INSERT INTO data (timestamp, value, aggregator_id, feed_id) VALUES` + var insertQueries []string + validate := validator.New() + + for _, insertData := range bulkInsertData { + if err := validate.Struct(insertData); err != nil { + return "", err + } + + insertValueString := 
fmt.Sprintf("('%v'::timestamptz,%v,%v,%v)", insertData.Timestamp.String(), insertData.Value, insertData.AggregatorId, insertData.FeedId) + insertQueries = append(insertQueries, insertValueString) + } + joinedString := strings.Join(insertQueries, ", ") + result := baseQuery + " " + joinedString + " RETURNING *;" + return result, nil +} diff --git a/go-api/data/route.go b/go-api/data/route.go new file mode 100644 index 000000000..af9ca1cb9 --- /dev/null +++ b/go-api/data/route.go @@ -0,0 +1,15 @@ +package data + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + data := router.Group("/data") + + data.Post("", bulkInsert) + data.Get("", get) + data.Get("/:id", getById) + data.Delete("/:id", deleteById) + data.Get("/feed/:id", getByFeedId) +} diff --git a/go-api/favicon.ico b/go-api/favicon.ico new file mode 100644 index 000000000..a0bf1b781 Binary files /dev/null and b/go-api/favicon.ico differ diff --git a/go-api/feed/controller.go b/go-api/feed/controller.go new file mode 100644 index 000000000..9171e35b6 --- /dev/null +++ b/go-api/feed/controller.go @@ -0,0 +1,72 @@ +package feed + +import ( + "fmt" + "go-api/utils" + + "encoding/json" + + "github.com/gofiber/fiber/v2" +) + +type FeedWithoutAdapterIdModel struct { + Name string `db:"name" json:"name"` + Definition json.RawMessage `db:"definition" json:"definition"` +} + +type FeedInsertModel struct { + Name string `db:"name" json:"name"` + Definition json.RawMessage `db:"definition" json:"definition"` + AdapterId *utils.CustomInt64 `db:"adapter_id" json:"adapterId"` +} + +type FeedModel struct { + FeedId *utils.CustomInt64 `db:"feed_id" json:"id"` + Name string `db:"name" json:"name"` + Definition json.RawMessage `db:"definition" json:"definition"` + AdapterId *utils.CustomInt64 `db:"adapter_id" json:"adapterId"` +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[FeedModel](c, GetFeed, nil) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func 
getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[FeedModel](c, GetFeedById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func getByAdpaterId(c *fiber.Ctx) error { + if !utils.IsTesting(c) { + panic(fmt.Errorf("not allowed")) + } + id := c.Params("id") + results, err := utils.QueryRows[FeedModel](c, GetFeedsByAdapterId, map[string]any{"id": id}) + if err != nil { + panic(err) + } + return c.JSON(results) +} + +func removeById(c *fiber.Ctx) error { + if !utils.IsTesting(c) { + panic(fmt.Errorf("not allowed")) + } + id := c.Params("id") + result, err := utils.QueryRow[FeedModel](c, DeleteFeedById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/feed/queries.go b/go-api/feed/queries.go new file mode 100644 index 000000000..1e2eb7fac --- /dev/null +++ b/go-api/feed/queries.go @@ -0,0 +1,11 @@ +package feed + +const ( + GetFeed = `SELECT * FROM feeds;` + + GetFeedById = `SELECT * FROM feeds WHERE feed_id = @id;` + + DeleteFeedById = `DELETE FROM feeds WHERE feed_id = @id RETURNING *` + + GetFeedsByAdapterId = `SELECT * FROM feeds WHERE adapter_id = @id;` +) diff --git a/go-api/feed/route.go b/go-api/feed/route.go new file mode 100644 index 000000000..d0070dbd6 --- /dev/null +++ b/go-api/feed/route.go @@ -0,0 +1,15 @@ +package feed + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + + feed := router.Group("/feed") + feed.Delete("/:id", removeById) + feed.Get("/adapter/:id", getByAdpaterId) + feed.Get("", get) + feed.Get("/:id", getById) + +} diff --git a/go-api/go.mod b/go-api/go.mod new file mode 100644 index 000000000..a0cd20f2a --- /dev/null +++ b/go-api/go.mod @@ -0,0 +1,58 @@ +module go-api + +go 1.21.5 + +require ( + github.com/gofiber/fiber/v2 v2.52.0 + github.com/jackc/pgx/v5 v5.5.2 +) + +require ( + github.com/andybalholm/brotli v1.0.5 // indirect + github.com/araddon/dateparse 
v0.0.0-20210429162001-6b43995a97de // indirect + github.com/bits-and-blooms/bitset v1.10.0 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.2.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/consensys/bavard v0.1.13 // indirect + github.com/consensys/gnark-crypto v0.12.1 // indirect + github.com/crate-crypto/go-kzg-4844 v0.7.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/ethereum/c-kzg-4844 v0.4.0 // indirect + github.com/ethereum/go-ethereum v1.13.8 // indirect + github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator v9.31.0+incompatible // indirect + github.com/go-playground/validator/v10 v10.16.0 // indirect + github.com/google/uuid v1.5.0 // indirect + github.com/holiman/uint256 v1.2.4 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/joho/godotenv v1.5.1 // indirect + github.com/klauspost/compress v1.17.0 // indirect + github.com/leodido/go-urn v1.2.4 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect + github.com/mmcloughlin/addchain v0.4.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/redis/go-redis/v9 v9.3.1 // indirect + github.com/rivo/uniseg v0.2.0 // indirect + github.com/stretchr/objx v0.5.1 // indirect + github.com/stretchr/testify v1.8.4 // indirect + github.com/supranational/blst v0.3.11 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasthttp v1.51.0 // indirect + 
github.com/valyala/tcplisten v1.0.0 // indirect + golang.org/x/crypto v0.17.0 // indirect + golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa // indirect + golang.org/x/net v0.18.0 // indirect + golang.org/x/sync v0.5.0 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/text v0.14.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + rsc.io/tmplfunc v0.0.3 // indirect +) diff --git a/go-api/go.sum b/go-api/go.sum new file mode 100644 index 000000000..93c591c13 --- /dev/null +++ b/go-api/go.sum @@ -0,0 +1,138 @@ +github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= +github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA= +github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw= +github.com/bits-and-blooms/bitset v1.10.0 h1:ePXTeiPEazB5+opbv5fr8umg2R/1NlzgDsyepwsSr88= +github.com/bits-and-blooms/bitset v1.10.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/btcsuite/btcd/btcec/v2 v2.2.0 h1:fzn1qaOt32TuLjFlkzYSsBC35Q3KUjT1SwPxiMSCF5k= +github.com/btcsuite/btcd/btcec/v2 v2.2.0/go.mod h1:U7MHm051Al6XmscBQ0BoNydpOTsFAn707034b5nY8zU= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= +github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= +github.com/consensys/gnark-crypto v0.12.1 h1:lHH39WuuFgVHONRl3J0LRBtuYdQTumFSDtJF7HpyG8M= +github.com/consensys/gnark-crypto v0.12.1/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= +github.com/crate-crypto/go-kzg-4844 v0.7.0 h1:C0vgZRk4q4EZ/JgPfzuSoxdCq3C3mOZMBShovmncxvA= +github.com/crate-crypto/go-kzg-4844 
v0.7.0/go.mod h1:1kMhvPgI0Ky3yIa+9lFySEBUBXkYxeOi8ZF1sYioxhc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/ethereum/c-kzg-4844 v0.4.0 h1:3MS1s4JtA868KpJxroZoepdV0ZKBp3u/O5HcZ7R3nlY= +github.com/ethereum/c-kzg-4844 v0.4.0/go.mod h1:VewdlzQmpT5QSrVhbBuGoCdFJkpaJlO1aQputP83wc0= +github.com/ethereum/go-ethereum v1.13.8 h1:1od+thJel3tM52ZUNQwvpYOeRHlbkVFZ5S8fhi0Lgsg= +github.com/ethereum/go-ethereum v1.13.8/go.mod h1:sc48XYQxCzH3fG9BcrXCOOgQk2JfZzNAmIKnceogzsA= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator v9.31.0+incompatible h1:UA72EPEogEnq76ehGdEDp4Mit+3FDh548oRqwVgNsHA= +github.com/go-playground/validator v9.31.0+incompatible/go.mod 
h1:yrEkQXlcI+PugkyDjY2bRrL/UBU4f3rvrgkN3V8JEig= +github.com/go-playground/validator/v10 v10.16.0 h1:x+plE831WK4vaKHO/jpgUGsvLKIqRRkz6M78GuJAfGE= +github.com/go-playground/validator/v10 v10.16.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/gofiber/fiber/v2 v2.51.0 h1:JNACcZy5e2tGApWB2QrRpenTWn0fq0hkFm6k0C86gKQ= +github.com/gofiber/fiber/v2 v2.51.0/go.mod h1:xaQRZQJGqnKOQnbQw+ltvku3/h8QxvNi8o6JiJ7Ll0U= +github.com/gofiber/fiber/v2 v2.52.0 h1:S+qXi7y+/Pgvqq4DrSmREGiFwtB7Bu6+QFLuIHYw/UE= +github.com/gofiber/fiber/v2 v2.52.0/go.mod h1:KEOE+cXMhXG0zHc9d8+E38hoX+ZN7bhOtgeF2oT6jrQ= +github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= +github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= +github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/holiman/uint256 v1.2.4 h1:jUc4Nk8fm9jZabQuqr2JzednajVmBpC+oiTiXZJEApU= +github.com/holiman/uint256 v1.2.4/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.5.1 h1:5I9etrGkLrN+2XPCsi6XLlV5DITbSL/xBZdmAxFcXPI= +github.com/jackc/pgx/v5 v5.5.1/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA= +github.com/jackc/pgx/v5 v5.5.2 h1:iLlpgp4Cp/gC9Xuscl7lFL1PhhW+ZLtXZcrfCt4C3tA= +github.com/jackc/pgx/v5 v5.5.2/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A= +github.com/jackc/puddle/v2 v2.2.1 
h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= +github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= +github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= +github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= +github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY= +github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU= +github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/redis/go-redis/v9 v9.3.1 h1:KqdY8U+3X6z+iACvumCNxnoluToB+9Me+TvyFa21Mds= +github.com/redis/go-redis/v9 v9.3.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= +github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.1 h1:4VhoImhV/Bm0ToFkXFi8hXNXwpDRZ/ynw3amt82mzq0= +github.com/stretchr/objx v0.5.1/go.mod h1:/iHQpkQwBD6DLUmQ4pE+s1TXdob1mORJ4/UFdrifcy0= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/supranational/blst v0.3.11 h1:LyU6FolezeWAhvQk0k6O/d49jqgO52MSDDfYgbeoEm4= +github.com/supranational/blst v0.3.11/go.mod 
h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.50.0 h1:H7fweIlBm0rXLs2q0XbalvJ6r0CUPFWK3/bB4N13e9M= +github.com/valyala/fasthttp v1.50.0/go.mod h1:k2zXd82h/7UZc3VOdJ2WaUqt1uZ/XpXAfE9i+HBC3lA= +github.com/valyala/fasthttp v1.51.0 h1:8b30A5JlZ6C7AS81RsWjYMQmrZG6feChmgAolCl1SqA= +github.com/valyala/fasthttp v1.51.0/go.mod h1:oI2XroL+lI7vdXyYoQk03bXBThfFl2cVdIA3Xl7cH8g= +github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8= +github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= +golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= +golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= +golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= +golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= +golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= +golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.14.0 
h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= +golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU= +rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA= diff --git a/go-api/l2aggregator/controller.go b/go-api/l2aggregator/controller.go new file mode 100644 index 000000000..db09cca71 --- /dev/null +++ b/go-api/l2aggregator/controller.go @@ -0,0 +1,34 @@ +package l2aggregator + +import ( + "go-api/chain" + "go-api/utils" + + "github.com/gofiber/fiber/v2" +) + +type l2agregatorPairModel struct { + Id *utils.CustomInt64 `db:"id" json:"id"` + L1AggregatorAddress string `db:"l1_aggregator_address" json:"l1AggregatorAddress"` + L2AggregatorAddress string `db:"l2_aggregator_address" json:"l2AggregatorAddress"` + Active *utils.CustomBool `db:"active" json:"active"` + ChainId *utils.CustomInt64 `db:"chain_id" json:"chainId"` +} + +func get(c *fiber.Ctx) error { + _chain := c.Params("chain") + l1Address := c.Params("l1Address") + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, 
map[string]any{"name": _chain}) + if err != nil { + panic(err) + } + + result, err := utils.QueryRow[l2agregatorPairModel](c, GetL2AggregatorPair, map[string]any{"l1_aggregator_address": l1Address, "chain_id": chain_result.ChainId}) + if err != nil { + panic(err) + } + + return c.JSON(result) + +} diff --git a/go-api/l2aggregator/queries.go b/go-api/l2aggregator/queries.go new file mode 100644 index 000000000..8a714f997 --- /dev/null +++ b/go-api/l2aggregator/queries.go @@ -0,0 +1,7 @@ +package l2aggregator + +const ( + GetL2AggregatorPair = ` + SELECT * FROM l2aggregatorpair WHERE (l1_aggregator_address = @l1_aggregator_address AND chain_id = @chain_id) LIMIT 1; + ` +) diff --git a/go-api/l2aggregator/route.go b/go-api/l2aggregator/route.go new file mode 100644 index 000000000..624cc39e0 --- /dev/null +++ b/go-api/l2aggregator/route.go @@ -0,0 +1,11 @@ +package l2aggregator + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + l2aggregator := router.Group("/l2aggregator") + + l2aggregator.Get("/:chain/:l1Address", get) +} diff --git a/go-api/listener/controller.go b/go-api/listener/controller.go new file mode 100644 index 000000000..c832261e5 --- /dev/null +++ b/go-api/listener/controller.go @@ -0,0 +1,153 @@ +package listener + +import ( + "go-api/chain" + "go-api/service" + "go-api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ListenerUpdateModel struct { + Address string `db:"address" json:"address" validate:"required"` + EventName string `db:"event_name" json:"eventName" validate:"required"` +} + +type ListenerSearchModel struct { + Chain string `db:"name" json:"chain"` + Service string `db:"name" json:"service"` +} + +type ListenerModel struct { + ListenerId *utils.CustomInt64 `db:"listener_id" json:"id"` + Address string `db:"address" json:"address" validate:"required"` + EventName string `db:"event_name" json:"eventName" validate:"required"` + Service string `db:"service_name" 
json:"service" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +type ListenerInsertModel struct { + Address string `db:"address" json:"address" validate:"required"` + EventName string `db:"event_name" json:"eventName" validate:"required"` + Service string `db:"service_name" json:"service" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ListenerInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + panic(err) + } + + result, err := utils.QueryRow[ListenerModel](c, InsertListener, map[string]any{ + "address": payload.Address, + "event_name": payload.EventName, + "chain_id": chain_result.ChainId, + "service_id": service_result.ServiceId}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + payload := new(ListenerSearchModel) + params := GetListenerQueryParams{} + + if len(c.Body()) == 0 { + results, err := utils.QueryRows[ListenerModel](c, GenerateGetListenerQuery(params), nil) + if err != nil { + panic(err) + } + + return c.JSON(results) + } + + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + if payload.Chain != "" { + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + params.ChainId = chain_result.ChainId.String() + } + + if payload.Service != "" { + service_result, err := utils.QueryRow[service.ServiceModel](c, 
service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + panic(err) + } + params.ServiceId = service_result.ServiceId.String() + } + + results, err := utils.QueryRows[ListenerModel](c, GenerateGetListenerQuery(params), nil) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ListenerModel](c, GetListenerById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ListenerUpdateModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ListenerModel](c, UpdateListenerById, map[string]any{ + "id": id, + "address": payload.Address, + "event_name": payload.EventName}) + if err != nil { + panic(err) + } + + return c.JSON(result) + +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ListenerModel](c, DeleteListenerById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/listener/queries.go b/go-api/listener/queries.go new file mode 100644 index 000000000..6bd3f01c7 --- /dev/null +++ b/go-api/listener/queries.go @@ -0,0 +1,66 @@ +package listener + +import ( + "strings" +) + +type GetListenerQueryParams struct { + ChainId string + ServiceId string +} + +const ( + InsertListener = ` + INSERT INTO listeners (address, event_name, chain_id, service_id) + VALUES (@address, @event_name, @chain_id, @service_id) + RETURNING listeners.listener_id, listeners.address, listeners.event_name, + (SELECT name from chains WHERE chain_id = listeners.chain_id) AS chain_name, + (SELECT name from services WHERE service_id = listeners.service_id) AS service_name; + ` + + GetListenerById = ` + 
SELECT listeners.listener_id, listeners.address, listeners.event_name, chains.name AS chain_name, services.name AS service_name + FROM listeners + JOIN chains ON listeners.chain_id = chains.chain_id + JOIN services ON listeners.service_id = services.service_id + WHERE listener_id = @id + LIMIT 1; + ` + + UpdateListenerById = ` + UPDATE listeners + SET address = @address, event_name = @event_name + WHERE listener_id = @id + RETURNING listeners.listener_id, listeners.address, listeners.event_name, + (SELECT name from chains WHERE chain_id = listeners.chain_id) AS chain_name, + (SELECT name from services WHERE service_id = listeners.service_id) AS service_name; + ` + + DeleteListenerById = ` + DELETE FROM listeners WHERE listener_id = @id + RETURNING listeners.listener_id, listeners.address, listeners.event_name, + (SELECT name from chains WHERE chain_id = listeners.chain_id) AS chain_name, + (SELECT name from services WHERE service_id = listeners.service_id) AS service_name; + ` +) + +func GenerateGetListenerQuery(params GetListenerQueryParams) string { + baseQuery := ` + SELECT listeners.listener_id, listeners.address, listeners.event_name, chains.name AS chain_name, services.name AS service_name + FROM listeners + JOIN chains ON listeners.chain_id = chains.chain_id + JOIN services ON listeners.service_id = services.service_id + ` + var conditionQueries []string + if params.ChainId != "" { + conditionQueries = append(conditionQueries, "listeners.chain_id = "+params.ChainId) + } + if params.ServiceId != "" { + conditionQueries = append(conditionQueries, "listeners.service_id = "+params.ServiceId) + } + if len(conditionQueries) == 0 { + return baseQuery + } + joinedString := strings.Join(conditionQueries, " AND ") + return baseQuery + " WHERE " + joinedString +} diff --git a/go-api/listener/route.go b/go-api/listener/route.go new file mode 100644 index 000000000..9318ab98a --- /dev/null +++ b/go-api/listener/route.go @@ -0,0 +1,15 @@ +package listener + +import ( + 
"github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + listener := router.Group("/listener") + + listener.Post("", insert) + listener.Get("", get) + listener.Get("/:id", getById) + listener.Patch("/:id", updateById) + listener.Delete("/:id", deleteById) +} diff --git a/go-api/main.go b/go-api/main.go new file mode 100644 index 000000000..08005f395 --- /dev/null +++ b/go-api/main.go @@ -0,0 +1,81 @@ +package main + +import ( + _ "embed" + "go-api/adapter" + "go-api/aggregate" + "go-api/aggregator" + "go-api/apierr" + "go-api/chain" + "go-api/data" + "go-api/feed" + "go-api/l2aggregator" + "go-api/listener" + "go-api/proxy" + "go-api/reporter" + "go-api/service" + "go-api/utils" + "go-api/vrf" + "log" + + "github.com/gofiber/fiber/v2" + "github.com/joho/godotenv" +) + +//go:embed .version +var version string + +func main() { + err := godotenv.Load() + if err != nil { + log.Println("env file is not found, continuing without .env file") + } + config := utils.LoadEnvVars() + + appConfig, err := utils.Setup(version) + if err != nil { + panic(err) + } + + postgres := appConfig.Postgres + redis := appConfig.Redis + app := appConfig.App + + defer postgres.Close() + defer redis.Close() + + v1 := app.Group("/api/v1") + SetRouter(v1) + + var port string + if val, ok := config["APP_PORT"].(string); ok { + port = val + } else { + port = "3000" + } + + err = app.Listen(":" + port) + if err != nil { + panic(err) + } +} + +func SetRouter(_router fiber.Router) { + (_router).Get("", func(c *fiber.Ctx) error { + return c.SendString("Orakl Network API") + }) + + adapter.Routes(_router) + aggregate.Routes(_router) + aggregator.Routes(_router) + apierr.Routes(_router) + chain.Routes(_router) + data.Routes(_router) + feed.Routes(_router) + l2aggregator.Routes(_router) + listener.Routes(_router) + proxy.Routes(_router) + reporter.Routes(_router) + service.Routes(_router) + vrf.Routes(_router) +} diff --git a/go-api/migrations/000001_initialize_tables.down.sql 
b/go-api/migrations/000001_initialize_tables.down.sql new file mode 100644 index 000000000..ba1dfa056 --- /dev/null +++ b/go-api/migrations/000001_initialize_tables.down.sql @@ -0,0 +1,25 @@ +-- Drop foreign key constraints first +ALTER TABLE IF EXISTS "feeds" DROP CONSTRAINT IF EXISTS "feeds_adapter_id_fkey"; +ALTER TABLE IF EXISTS "aggregates" DROP CONSTRAINT IF EXISTS "aggregates_aggregator_id_fkey"; +ALTER TABLE IF EXISTS "aggregators" DROP CONSTRAINT IF EXISTS "aggregators_adapter_id_fkey"; +ALTER TABLE IF EXISTS "aggregators" DROP CONSTRAINT IF EXISTS "aggregators_chain_id_fkey"; +ALTER TABLE IF EXISTS "data" DROP CONSTRAINT IF EXISTS "data_aggregator_id_fkey"; +ALTER TABLE IF EXISTS "data" DROP CONSTRAINT IF EXISTS "data_feed_id_fkey"; +ALTER TABLE IF EXISTS "listeners" DROP CONSTRAINT IF EXISTS "listeners_chain_id_fkey"; +ALTER TABLE IF EXISTS "listeners" DROP CONSTRAINT IF EXISTS "listeners_service_id_fkey"; +ALTER TABLE IF EXISTS "reporters" DROP CONSTRAINT IF EXISTS "reporters_chain_id_fkey"; +ALTER TABLE IF EXISTS "reporters" DROP CONSTRAINT IF EXISTS "reporters_service_id_fkey"; + +-- Drop tables in reverse order of creation +DROP TABLE IF EXISTS "vrf_keys"; +DROP TABLE IF EXISTS "services"; +DROP TABLE IF EXISTS "reporters"; +DROP TABLE IF EXISTS "proxies"; +DROP TABLE IF EXISTS "listeners"; +DROP TABLE IF EXISTS "feeds"; +DROP TABLE IF EXISTS "error"; +DROP TABLE IF EXISTS "data"; +DROP TABLE IF EXISTS "chains"; +DROP TABLE IF EXISTS "aggregators"; +DROP TABLE IF EXISTS "aggregates"; +DROP TABLE IF EXISTS "adapters"; diff --git a/go-api/migrations/000001_initialize_tables.up.sql b/go-api/migrations/000001_initialize_tables.up.sql new file mode 100644 index 000000000..05924f9f6 --- /dev/null +++ b/go-api/migrations/000001_initialize_tables.up.sql @@ -0,0 +1,119 @@ +CREATE TABLE IF NOT EXISTS "adapters" ( + adapter_hash TEXT NOT NULL, + adapter_id BIGSERIAL NOT NULL, + decimals INTEGER NOT NULL, + name TEXT NOT NULL, + CONSTRAINT "adapters_pkey" 
PRIMARY KEY ("adapter_id") +); + +CREATE TABLE IF NOT EXISTS "chains" ( + chain_id BIGSERIAL NOT NULL, + name TEXT NOT NULL, + CONSTRAINT "chains_pkey" PRIMARY KEY ("chain_id") +); + +CREATE TABLE IF NOT EXISTS "services" ( + name TEXT NOT NULL, + service_id BIGSERIAL NOT NULL, + CONSTRAINT "services_pkey" PRIMARY KEY ("service_id") +); + +CREATE TABLE IF NOT EXISTS "aggregators" ( + absolute_threshold DOUBLE PRECISION NOT NULL, + active BOOLEAN NOT NULL DEFAULT false, + adapter_id BIGINT NOT NULL, + address TEXT NOT NULL, + aggregator_hash TEXT NOT NULL, + aggregator_id BIGSERIAL NOT NULL, + chain_id BIGINT NOT NULL, + fetcher_type INTEGER NOT NULL, + heartbeat INTEGER NOT NULL, + name TEXT NOT NULL, + threshold DOUBLE PRECISION NOT NULL, + CONSTRAINT "aggregators_adapter_id_fkey" FOREIGN KEY ("adapter_id") REFERENCES "public"."adapters" ("adapter_id"), + CONSTRAINT "aggregators_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "aggregators_pkey" PRIMARY KEY ("aggregator_id") +); + +CREATE TABLE IF NOT EXISTS "aggregates" ( + aggregate_id BIGSERIAL NOT NULL, + aggregator_id BIGINT NOT NULL, + timestamp TIMESTAMP WITH TIME ZONE NOT NULL, + value BIGINT NOT NULL, + CONSTRAINT "aggregates_aggregator_id_fkey" FOREIGN KEY ("aggregator_id") REFERENCES "public"."aggregators" ("aggregator_id"), + CONSTRAINT "aggregates_pkey" PRIMARY KEY ("aggregate_id") +); + +CREATE TABLE IF NOT EXISTS "feeds" ( + adapter_id BIGINT NOT NULL, + definition JSONB NOT NULL, + feed_id BIGSERIAL NOT NULL, + name TEXT NOT NULL, + CONSTRAINT "feeds_adapter_id_fkey" FOREIGN KEY ("adapter_id") REFERENCES "public"."adapters" ("adapter_id"), + CONSTRAINT "feeds_pkey" PRIMARY KEY ("feed_id") +); + +CREATE TABLE IF NOT EXISTS "data" ( + aggregator_id BIGINT NOT NULL, + data_id BIGSERIAL NOT NULL, + feed_id BIGINT NOT NULL, + timestamp TIMESTAMP WITH TIME ZONE NOT NULL, + value BIGINT NOT NULL, + CONSTRAINT "data_aggregator_id_fkey" FOREIGN KEY 
("aggregator_id") REFERENCES "public"."aggregators" ("aggregator_id"), + CONSTRAINT "data_pkey" PRIMARY KEY ("data_id"), + CONSTRAINT "data_feed_id_fkey" FOREIGN KEY ("feed_id") REFERENCES "public"."feeds" ("feed_id") +); + +CREATE TABLE IF NOT EXISTS "error" ( + code TEXT NOT NULL, + error_id BIGSERIAL NOT NULL, + name TEXT NOT NULL, + request_id TEXT NOT NULL, + stack TEXT NOT NULL, + timestamp TIMESTAMP WITH TIME ZONE NOT NULL, + CONSTRAINT "error_pkey" PRIMARY KEY ("error_id") +); + +CREATE TABLE IF NOT EXISTS "listeners" ( + address CHARACTER VARYING(42) NOT NULL, + chain_id BIGINT NOT NULL, + event_name CHARACTER VARYING(255) NOT NULL, + listener_id BIGSERIAL NOT NULL, + service_id BIGINT NOT NULL, + CONSTRAINT "listeners_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "listeners_service_id_fkey" FOREIGN KEY ("service_id") REFERENCES "public"."services" ("service_id"), + CONSTRAINT "listeners_pkey" PRIMARY KEY ("listener_id") +); + +CREATE TABLE IF NOT EXISTS "proxies" ( + host TEXT NOT NULL, + id BIGSERIAL NOT NULL, + location TEXT, + port INTEGER NOT NULL, + protocol TEXT NOT NULL, + CONSTRAINT "proxies_pkey" PRIMARY KEY ("id") +); + +CREATE TABLE IF NOT EXISTS "reporters" ( + address CHARACTER VARYING(42) NOT NULL, + chain_id BIGINT NOT NULL, + "oracleAddress" CHARACTER VARYING(42) NOT NULL, + "privateKey" CHARACTER VARYING(164) NOT NULL, + reporter_id BIGSERIAL NOT NULL, + service_id BIGINT NOT NULL, + CONSTRAINT "reporters_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "reporters_service_id_fkey" FOREIGN KEY ("service_id") REFERENCES "public"."services" ("service_id"), + CONSTRAINT "reporters_pkey" PRIMARY KEY ("reporter_id") +); + +CREATE TABLE IF NOT EXISTS "vrf_keys" ( + chain_id BIGINT NOT NULL, + key_hash CHARACTER VARYING(66) NOT NULL, + pk CHARACTER VARYING(130) NOT NULL, + pk_x CHARACTER VARYING(78) NOT NULL, + pk_y CHARACTER VARYING(78) NOT NULL, + 
sk CHARACTER VARYING(64) NOT NULL, + vrf_key_id BIGSERIAL NOT NULL, + CONSTRAINT "vrf_keys_chain_id_fkey" FOREIGN KEY ("chain_id") REFERENCES "public"."chains" ("chain_id"), + CONSTRAINT "vrf_keys_pkey" PRIMARY KEY ("vrf_key_id") +); \ No newline at end of file diff --git a/go-api/proxy/controller.go b/go-api/proxy/controller.go new file mode 100644 index 000000000..4008149e9 --- /dev/null +++ b/go-api/proxy/controller.go @@ -0,0 +1,101 @@ +package proxy + +import ( + "go-api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ProxyModel struct { + Id *utils.CustomInt64 `db:"id" json:"id"` + Protocol string `db:"protocol" json:"protocol" validate:"required"` + Host string `db:"host" json:"host" validate:"required"` + Port *utils.CustomInt32 `db:"port" json:"port" validate:"required"` + Location *string `db:"location" json:"location"` +} + +type ProxyInsertModel struct { + Protocol string `db:"protocol" json:"protocol" validate:"required"` + Host string `db:"host" json:"host" validate:"required"` + Port *utils.CustomInt32 `db:"port" json:"port" validate:"required"` + Location *string `db:"location" json:"location"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ProxyInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ProxyModel](c, InsertProxy, map[string]any{ + "protocol": payload.Protocol, + "host": payload.Host, + "port": payload.Port, + "location": &payload.Location}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[ProxyModel](c, GetProxy, nil) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ProxyModel](c, GetProxyById, map[string]any{"id": id}) + if err != nil { 
+ panic(err) + } + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + + payload := new(ProxyInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ProxyModel](c, UpdateProxyById, map[string]any{ + "id": id, + "protocol": payload.Protocol, + "host": payload.Host, + "port": payload.Port, + "location": &payload.Location}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ProxyModel](c, DeleteProxyById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/proxy/queries.go b/go-api/proxy/queries.go new file mode 100644 index 000000000..475df6a33 --- /dev/null +++ b/go-api/proxy/queries.go @@ -0,0 +1,20 @@ +package proxy + +const ( + InsertProxy = `INSERT INTO proxies (protocol, host, port, location) VALUES (@protocol, @host, @port, @location) RETURNING *;` + + GetProxy = `SELECT * FROM proxies ORDER BY id asc;` + + GetProxyById = `SELECT * FROM proxies WHERE id = @id;` + + UpdateProxyById = ` + UPDATE proxies + SET protocol = @protocol, host = @host, port = @port, location = @location + WHERE id = @id + RETURNING *; + ` + + DeleteProxyById = ` + DELETE FROM proxies WHERE id = @id RETURNING *; + ` +) diff --git a/go-api/proxy/route.go b/go-api/proxy/route.go new file mode 100644 index 000000000..991a3971c --- /dev/null +++ b/go-api/proxy/route.go @@ -0,0 +1,15 @@ +package proxy + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + proxy := router.Group("/proxy") + + proxy.Post("", insert) + proxy.Get("", get) + proxy.Get("/:id", getById) + proxy.Patch("/:id", updateById) + proxy.Delete("/:id", deleteById) +} diff --git a/go-api/reporter/controller.go 
b/go-api/reporter/controller.go new file mode 100644 index 000000000..7293a12f2 --- /dev/null +++ b/go-api/reporter/controller.go @@ -0,0 +1,248 @@ +package reporter + +import ( + "go-api/chain" + "go-api/service" + "go-api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type ReporterUpdateModel struct { + Address string `db:"address" json:"address" validate:"required"` + PrivateKey string `db:"privateKey" json:"privateKey" validate:"required"` + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` +} + +type ReporterModel struct { + ReporterId *utils.CustomInt64 `db:"reporter_id" json:"id"` + Address string `db:"address" json:"address" validate:"required"` + PrivateKey string `db:"privateKey" json:"privateKey" validate:"required"` + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` + Service string `db:"service_name" json:"service" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +type ReporterInsertModel struct { + Address string `db:"address" json:"address" validate:"required"` + PrivateKey string `db:"privateKey" json:"privateKey" validate:"required"` + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` + Service string `db:"service_name" json:"service" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +type ReporterSearchModel struct { + Chain string `db:"chain_name" json:"chain"` + Service string `db:"service_name" json:"service"` +} + +type ReporterSearchByOracleAddressModel struct { + OracleAddress string `db:"oracleAddress" json:"oracleAddress" validate:"required"` + Chain string `db:"chain_name" json:"chain"` + Service string `db:"service_name" json:"service"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ReporterInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if 
err := validate.Struct(payload); err != nil { + panic(err) + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + panic(err) + } + + encrypted, err := utils.EncryptText(payload.PrivateKey) + if err != nil { + panic(err) + } + + result, err := utils.QueryRow[ReporterModel](c, InsertReporter, map[string]any{ + "address": payload.Address, + "privateKey": encrypted, + "oracleAddress": payload.OracleAddress, + "chain_id": chain_result.ChainId, + "service_id": service_result.ServiceId}) + if err != nil { + panic(err) + } + + result.PrivateKey = payload.PrivateKey + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + payload := new(ReporterSearchModel) + params := GetReporterQueryParams{} + + if len(c.Body()) == 0 { + results, err := utils.QueryRows[ReporterModel](c, GenerateGetReporterQuery(params), nil) + if err != nil { + panic(err) + } + for i := range results { + decrypted, err := utils.DecryptText(results[i].PrivateKey) + if err != nil { + panic(err) + } + results[i].PrivateKey = decrypted + } + + return c.JSON(results) + } + + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + if payload.Chain != "" { + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + params.ChainId = chain_result.ChainId.String() + } + + if payload.Service != "" { + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + panic(err) + } + params.ServiceId = service_result.ServiceId.String() + } + + results, err := utils.QueryRows[ReporterModel](c, GenerateGetReporterQuery(params), nil) + if err != nil { + panic(err) + } 
+ + for i := range results { + decrypted, err := utils.DecryptText(results[i].PrivateKey) + if err != nil { + panic(err) + } + results[i].PrivateKey = decrypted + } + + return c.JSON(results) +} + +func getByOracleAddress(c *fiber.Ctx) error { + oracleAddress := c.Params("oracleAddress") + payload := new(ReporterSearchModel) + params := GetReporterQueryParams{} + + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + params.OracleAddress = oracleAddress + + if payload.Chain != "" { + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + params.ChainId = chain_result.ChainId.String() + } + + if payload.Service != "" { + service_result, err := utils.QueryRow[service.ServiceModel](c, service.GetServiceByName, map[string]any{"name": payload.Service}) + if err != nil { + panic(err) + } + params.ServiceId = service_result.ServiceId.String() + } + + results, err := utils.QueryRows[ReporterModel](c, GenerateGetReporterQuery(params), nil) + if err != nil { + panic(err) + } + + for i := range results { + decrypted, err := utils.DecryptText(results[i].PrivateKey) + if err != nil { + panic(err) + } + results[i].PrivateKey = decrypted + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ReporterModel](c, GetReporterById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + decrypted, err := utils.DecryptText(result.PrivateKey) + if err != nil { + panic(err) + } + result.PrivateKey = decrypted + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ReporterUpdateModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + encrypted, err := utils.EncryptText(payload.PrivateKey) + if err != nil { + panic(err) + } + + 
result, err := utils.QueryRow[ReporterModel](c, UpdateReporterById, map[string]any{ + "id": id, + "address": payload.Address, + "privateKey": encrypted, + "oracleAddress": payload.OracleAddress}) + + if err != nil { + panic(err) + } + + result.PrivateKey = payload.PrivateKey + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ReporterModel](c, DeleteReporterById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + decrypted, err := utils.DecryptText(result.PrivateKey) + if err != nil { + panic(err) + } + result.PrivateKey = decrypted + + return c.JSON(result) +} diff --git a/go-api/reporter/queries.go b/go-api/reporter/queries.go new file mode 100644 index 000000000..4148a1969 --- /dev/null +++ b/go-api/reporter/queries.go @@ -0,0 +1,65 @@ +package reporter + +import ( + "strings" +) + +type GetReporterQueryParams struct { + ChainId string + ServiceId string + OracleAddress string +} + +const ( + InsertReporter = ` + INSERT INTO reporters (address, "privateKey", "oracleAddress", chain_id, service_id) + VALUES (@address, @privateKey, @oracleAddress, @chain_id, @service_id) + RETURNING reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", + (SELECT name FROM chains WHERE chains.chain_id = reporters.chain_id) AS chain_name, + (SELECT name FROM services WHERE services.service_id = reporters.service_id) AS service_name; + ` + + GetReporterById = ` + SELECT reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", chains.name AS chain_name, services.name AS service_name + FROM reporters + JOIN chains ON reporters.chain_id = chains.chain_id + JOIN services ON reporters.service_id = services.service_id + WHERE reporter_id = @id LIMIT 1;` + + UpdateReporterById = ` + UPDATE reporters + SET address = @address, "privateKey" = @privateKey, "oracleAddress" = @oracleAddress + WHERE reporter_id = @id + RETURNING 
reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", + (SELECT name FROM chains WHERE chains.chain_id = reporters.chain_id) AS chain_name, + (SELECT name FROM services WHERE services.service_id = reporters.service_id) AS service_name; + ` + + DeleteReporterById = `DELETE FROM reporters WHERE reporter_id = @id RETURNING reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", + (SELECT name FROM chains WHERE chains.chain_id = reporters.chain_id) AS chain_name, + (SELECT name FROM services WHERE services.service_id = reporters.service_id) AS service_name;` +) + +func GenerateGetReporterQuery(params GetReporterQueryParams) string { + baseQuery := ` + SELECT reporters.reporter_id, reporters.address, "reporters"."privateKey", "reporters"."oracleAddress", chains.name AS chain_name, services.name AS service_name + FROM reporters + JOIN chains ON reporters.chain_id = chains.chain_id + JOIN services ON reporters.service_id = services.service_id + ` + var conditionQueries []string + if params.ChainId != "" { + conditionQueries = append(conditionQueries, "reporters.chain_id = "+params.ChainId) + } + if params.ServiceId != "" { + conditionQueries = append(conditionQueries, "reporters.service_id = "+params.ServiceId) + } + if params.OracleAddress != "" { + conditionQueries = append(conditionQueries, "\"reporters\".\"oracleAddress\" = '"+params.OracleAddress+"'") + } + if len(conditionQueries) == 0 { + return baseQuery + } + joinedString := strings.Join(conditionQueries, " AND ") + return baseQuery + " WHERE " + joinedString +} diff --git a/go-api/reporter/route.go b/go-api/reporter/route.go new file mode 100644 index 000000000..961c54db3 --- /dev/null +++ b/go-api/reporter/route.go @@ -0,0 +1,16 @@ +package reporter + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + reporter := router.Group("/reporter") + + reporter.Post("", insert) + reporter.Get("", get) + 
reporter.Get("/oracle-address/:oracleAddress", getByOracleAddress) + reporter.Get("/:id", getById) + reporter.Patch("/:id", updateById) + reporter.Delete("/:id", deleteById) +} diff --git a/go-api/scripts/consistency_test.go b/go-api/scripts/consistency_test.go new file mode 100644 index 000000000..4d2cdeb7b --- /dev/null +++ b/go-api/scripts/consistency_test.go @@ -0,0 +1,481 @@ +package scripts + +import ( + "encoding/json" + "go-api/adapter" + "go-api/aggregate" + "go-api/aggregator" + "go-api/apierr" + "go-api/chain" + "go-api/data" + "go-api/feed" + "go-api/listener" + "go-api/proxy" + "go-api/reporter" + "go-api/service" + "go-api/utils" + "go-api/vrf" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +// assumes that both node server and go server is both up in local env +// http://127.0.0.1:3000/api/v1/ +const ( + NODE_PORT = "3000" + GO_PORT = "3111" + NODE_URL = "http://127.0.0.1:" + NODE_PORT + "/api/v1" + GO_URL = "http://127.0.0.1:" + GO_PORT + "/api/v1" +) + +type AdapterInsertModel struct { + _AdapterInsertModel + FEEDS []feed.FeedInsertModel `json:"feeds"` +} + +type _AdapterInsertModel struct { + ADAPTER_HASH string `db:"adapter_hash" json:"adapterHash" validate:"required"` + NAME string `db:"name" json:"name" validate:"required"` + DECIMALS int `db:"decimals" json:"decimals" validate:"required"` +} + +var insertedService service.ServiceModel +var insertedChain chain.ChainModel +var insertedAdapter adapter.AdapterModel +var insertedAggregator aggregator.AggregatorResultModel +var insertedAggregate aggregate.AggregateModel +var insertedFeeds []feed.FeedModel +var insertedError apierr.ErrorModel +var insertedListener listener.ListenerModel +var insertedProxy proxy.ProxyModel +var insertedReporter reporter.ReporterModel +var insertedVrf vrf.VrfModel +var insertedData data.DataResultModel + +func TestConsistency(t *testing.T) { + beforeAll() + + TestVrfConsistency(t) + TestServiceConsistency(t) + TestReporterConsistency(t) + 
TestProxyConsistency(t) + TestListenerConsistency(t) + TestFeedConsistency(t) + TestDataConsistency(t) + TestChainConsistency(t) + TestApierrConsistency(t) + TestAggregatorConsistency(t) + TestAggregateConsistency(t) + TestAdapterConsistency(t) + + defer cleanup() +} + +func TestVrfConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]vrf.VrfModel](NODE_URL+"/vrf", "GET", map[string]any{"chain": insertedChain.Name}) + readAllFromGoApi, _ := utils.UrlRequest[[]vrf.VrfModel](GO_URL+"/vrf", "GET", map[string]any{"chain": insertedChain.Name}) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[vrf.VrfModel](NODE_URL+"/vrf/"+insertedVrf.VrfKeyId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[vrf.VrfModel](GO_URL+"/vrf/"+insertedVrf.VrfKeyId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestServiceConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]service.ServiceModel](NODE_URL+"/service", "GET", nil) + readAllFromGoApi, _ := utils.UrlRequest[[]service.ServiceModel](GO_URL+"/service", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[service.ServiceModel](NODE_URL+"/service/"+insertedService.ServiceId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[service.ServiceModel](GO_URL+"/service/"+insertedService.ServiceId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestReporterConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]reporter.ReporterModel](NODE_URL+"/reporter", "GET", map[string]any{"chain": insertedChain.Name, "service": insertedService.Name}) + readAllFromGoApi, _ := utils.UrlRequest[[]reporter.ReporterModel](GO_URL+"/reporter", "GET", map[string]any{"chain": insertedChain.Name, "service": insertedService.Name}) + 
assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[reporter.ReporterModel](NODE_URL+"/reporter/"+insertedReporter.ReporterId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[reporter.ReporterModel](GO_URL+"/reporter/"+insertedReporter.ReporterId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestProxyConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]proxy.ProxyModel](NODE_URL+"/proxy", "GET", nil) + readAllFromGoApi, _ := utils.UrlRequest[[]proxy.ProxyModel](GO_URL+"/proxy", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[proxy.ProxyModel](NODE_URL+"/proxy/"+insertedProxy.Id.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[proxy.ProxyModel](GO_URL+"/proxy/"+insertedProxy.Id.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestListenerConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]listener.ListenerModel](NODE_URL+"/listener", "GET", map[string]any{"chain": insertedChain.Name, "service": insertedService.Name}) + readAllFromGoApi, _ := utils.UrlRequest[[]listener.ListenerModel](GO_URL+"/listener", "GET", map[string]any{"chain": insertedChain.Name, "service": insertedService.Name}) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[listener.ListenerModel](NODE_URL+"/listener/"+insertedListener.ListenerId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[listener.ListenerModel](GO_URL+"/listener/"+insertedListener.ListenerId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestFeedConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]feed.FeedModel](NODE_URL+"/feed", "GET", nil) + readAllFromGoApi, 
_ := utils.UrlRequest[[]feed.FeedModel](GO_URL+"/feed", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[feed.FeedModel](NODE_URL+"/feed/"+insertedFeeds[0].FeedId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[feed.FeedModel](GO_URL+"/feed/"+insertedFeeds[0].FeedId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestDataConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]data.DataResultModel](NODE_URL+"/data", "GET", nil) + readAllFromGoApi, _ := utils.UrlRequest[[]data.DataResultModel](GO_URL+"/data", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[data.DataResultModel](NODE_URL+"/data/"+insertedData.DataId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[data.DataResultModel](GO_URL+"/data/"+insertedData.DataId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestChainConsistency(t *testing.T) { + + readAllFromNodeApi, _ := utils.UrlRequest[[]chain.ChainModel](NODE_URL+"/chain", "GET", nil) + readAllFromGoApi, _ := utils.UrlRequest[[]chain.ChainModel](GO_URL+"/chain", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[chain.ChainModel](NODE_URL+"/chain/"+insertedChain.ChainId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[chain.ChainModel](GO_URL+"/chain/"+insertedChain.ChainId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestApierrConsistency(t *testing.T) { + readAllFromNodeApi, _ := utils.UrlRequest[[]apierr.ErrorModel](NODE_URL+"/error", "GET", nil) + readAllFromGoApi, _ := utils.UrlRequest[[]apierr.ErrorModel](GO_URL+"/error", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + 
readSingleFromNodeApi, _ := utils.UrlRequest[apierr.ErrorModel](NODE_URL+"/error/"+insertedError.ERROR_ID.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[apierr.ErrorModel](GO_URL+"/error/"+insertedError.ERROR_ID.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestAggregatorConsistency(t *testing.T) { + readAllFromNodeApi, _ := utils.UrlRequest[[]aggregator.AggregatorResultModel](NODE_URL+"/aggregator?chain="+insertedChain.Name, "GET", nil) + readAllFromGoApi, _ := utils.UrlRequest[[]aggregator.AggregatorResultModel](GO_URL+"/aggregator?chain="+insertedChain.Name, "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[aggregator.AggregatorResultModel](NODE_URL+"/aggregator/"+insertedAggregator.AggregatorHash+"/"+insertedChain.Name, "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[aggregator.AggregatorResultModel](GO_URL+"/aggregator/"+insertedAggregator.AggregatorHash+"/"+insertedChain.Name, "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestAggregateConsistency(t *testing.T) { + readAllFromNodeApi, _ := utils.UrlRequest[[]aggregate.AggregateModel](NODE_URL+"/aggregate", "GET", nil) + readAllFromGoApi, _ := utils.UrlRequest[[]aggregate.AggregateModel](GO_URL+"/aggregate", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[aggregate.AggregateModel](NODE_URL+"/aggregate/"+insertedAggregate.AggregateId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[aggregate.AggregateModel](GO_URL+"/aggregate/"+insertedAggregate.AggregateId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +func TestAdapterConsistency(t *testing.T) { + readAllFromNodeApi, _ := utils.UrlRequest[[]adapter.AdapterModel](NODE_URL+"/adapter", "GET", nil) + readAllFromGoApi, _ := 
utils.UrlRequest[[]adapter.AdapterModel](GO_URL+"/adapter", "GET", nil) + assert.EqualValues(t, readAllFromNodeApi, readAllFromGoApi) + + readSingleFromNodeApi, _ := utils.UrlRequest[adapter.AdapterModel](NODE_URL+"/adapter/"+insertedAdapter.AdapterId.String(), "GET", nil) + readSingleFromGoApi, _ := utils.UrlRequest[adapter.AdapterModel](GO_URL+"/adapter/"+insertedAdapter.AdapterId.String(), "GET", nil) + assert.EqualValues(t, readSingleFromNodeApi, readSingleFromGoApi) +} + +// insert all data to read before test +func beforeAll() { + var err error + // insert service + serviceInsertData := service.ServiceInsertModel{Name: "test-service"} + insertedService, err = utils.UrlRequest[service.ServiceModel](GO_URL+"/service", "POST", serviceInsertData) + if err != nil { + panic("failed to insert service") + } + + // insert chain + chainInsertData := chain.ChainInsertModel{Name: "test-chain"} + insertedChain, err = utils.UrlRequest[chain.ChainModel](GO_URL+"/chain", "POST", chainInsertData) + if err != nil { + panic("failed to insert chain") + } + + // insert adapter & feed + _adapterInsertData := _AdapterInsertModel{ + ADAPTER_HASH: "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + NAME: "BTC-USD", + DECIMALS: 8, + } + + adapterInsertData := AdapterInsertModel{ + _AdapterInsertModel: _adapterInsertData, + FEEDS: []feed.FeedInsertModel{ + { + Name: "Binance-BTC-USD-adapter", + Definition: json.RawMessage(`{ + "url": "https://api.binance.us/api/v3/ticker/price?symbol=BTCUSD", + "headers": { + "Content-Type": "application/json" + }, + "method": "GET", + "reducers": [ + { + "function": "PARSE", + "args": [ + "price" + ] + }, + { + "function": "POW10", + "args": 8 + }, + { + "function": "ROUND" + } + ] + }`), + }, + }, + } + insertedAdapter, err = utils.UrlRequest[adapter.AdapterModel](GO_URL+"/adapter", "POST", adapterInsertData) + if err != nil { + panic("failed to insert adapter") + } + insertedFeeds, err = 
utils.UrlRequest[[]feed.FeedModel](GO_URL+"/feed/adapter/"+insertedAdapter.AdapterId.String(), "GET", nil) + if err != nil { + panic("failed to get inserted feeds") + } + + // insert aggregator + // aggregatorInsertData := aggregator.AggregatorInsertModel{ + // AGGREGATOR_HASH: "0x9ca45583d7b9b061d9e8a20d6a874fcfba50c7a9dbc9c65c3792b4ef0b31e7b9", + // ACTIVE: false, + // NAME: "BTC-USD", + // ADDRESS: "0x222", + // HEARTBEAT: 10000, + // THRESHOLD: 0.04, + // ABSOLUTE_THRESHOLD: 0.1, + // ADAPTER_HASH: insertedAdapter.ADAPTER_HASH, + // CHAIN: insertedChain.NAME, + // FETCHER_TYPE: 0, + // } + aggregatorInsertData := map[string]interface{}{ + "aggregatorHash": "0x9ca45583d7b9b061d9e8a20d6a874fcfba50c7a9dbc9c65c3792b4ef0b31e7b9", + "active": false, + "name": "BTC-USD", + "address": "0x222", + "heartbeat": 10000, + "threshold": 0.04, + "absoluteThreshold": 0.1, + "adapterHash": _adapterInsertData.ADAPTER_HASH, + "chain": insertedChain.Name, + "fetcherType": 0, + } + + insertedAggregator, err = utils.UrlRequest[aggregator.AggregatorResultModel](GO_URL+"/aggregator", "POST", aggregatorInsertData) + if err != nil { + panic("failed to insert aggregator") + } + + // insert error + errInsertData := apierr.ErrorInsertModel{ + RequestId: "66649924661314489704239946349158829048302840686075232939396730072454733114998", + Timestamp: &utils.CustomDateTime{Time: time.Now()}, + Code: "10020", + Name: "MissingKeyInJson", + Stack: `MissingKeyInJson + at wrapper (file:///app/dist/worker/reducer.js:19:23) + at file:///app/dist/utils.js:11:61 + at Array.reduce () + at file:///app/dist/utils.js:11:44 + at processRequest (file:///app/dist/worker/request-response.js:58:34) + at process.processTicksAndRejections (node:internal/process/task_queues:95:5) + at async Worker.wrapper [as processFn] (file:///app/dist/worker/request-response.js:27:25) + at async Worker.processJob (/app/node_modules/bullmq/dist/cjs/classes/worker.js:339:28) + at async Worker.retryIfFailed 
(/app/node_modules/bullmq/dist/cjs/classes/worker.js:513:24)`, + } + insertedError, err = utils.UrlRequest[apierr.ErrorModel](GO_URL+"/error", "POST", errInsertData) + if err != nil { + panic("failed to insert error") + } + + // insert data + // dataInsertData := data.BulkInsertModel{ + // DATA: []data.DataInsertModel{ + // { + // AGGREGATOR_ID: insertedAggregator.AGGREGATOR_ID, + // TIMESTAMP: &utils.CustomDateTime{Time: time.Now()}, + // VALUE: 2241772466578, + // FEED_ID: insertedFeeds[0].FEED_ID, + // }, + // }, + // } + + dataInsertData := map[string]interface{}{ + "data": []map[string]interface{}{ + { + "aggregatorId": insertedAggregator.AggregatorId, + "timestamp": time.Now(), + "value": 2241772466578, + "feedId": insertedFeeds[0].FeedId, + }, + }, + } + _, err = utils.UrlRequest[struct { + COUNT int `json:"count"` + }](GO_URL+"/data", "POST", dataInsertData) + if err != nil { + panic("failed to insert data") + } + insertedDataList, err := utils.UrlRequest[[]data.DataResultModel](GO_URL+"/data/feed/"+insertedFeeds[0].FeedId.String(), "GET", nil) + if err != nil { + panic("failed to read inserted data list") + } + + insertedData = insertedDataList[0] + + // insert aggregate + insertValue := utils.CustomInt64(10) + aggregateInsertData := aggregate.AggregateInsertModel{ + Timestamp: &utils.CustomDateTime{Time: time.Now()}, + AggregatorId: insertedAggregator.AggregatorId, + Value: &insertValue, + } + wrappedAggregateInsertData := aggregate.WrappedInsertModel{Data: aggregateInsertData} + insertedAggregate, err = utils.UrlRequest[aggregate.AggregateModel](GO_URL+"/aggregate", "POST", wrappedAggregateInsertData) + if err != nil { + panic("failed to insert aggregate") + } + + // insert listener + listenerInsertData := listener.ListenerInsertModel{ + Address: "0xa", + EventName: "new_round(uint, uint80)", + Chain: insertedChain.Name, + Service: insertedService.Name, + } + insertedListener, err = utils.UrlRequest[listener.ListenerModel](GO_URL+"/listener", "POST", 
listenerInsertData) + if err != nil { + panic("failed to insert listener") + } + + var portNumber = utils.CustomInt32(5000) + // insert proxy + proxyInsertData := proxy.ProxyInsertModel{ + Protocol: "http", + Host: "127.0.0.1", + Port: &portNumber, + } + insertedProxy, err = utils.UrlRequest[proxy.ProxyModel](GO_URL+"/proxy", "POST", proxyInsertData) + if err != nil { + panic("failed to insert proxy") + } + + // insert reporter + reporterInsertData := reporter.ReporterInsertModel{ + Address: "0xa", + PrivateKey: "0xb", + OracleAddress: "0xc", + Chain: insertedChain.Name, + Service: insertedService.Name, + } + insertedReporter, err = utils.UrlRequest[reporter.ReporterModel](GO_URL+"/reporter", "POST", reporterInsertData) + if err != nil { + panic("failed to insert reporter") + } + + // insert vrf + vrfInsertData := vrf.VrfInsertModel{ + Sk: "ebeb5229570725793797e30a426d7ef8aca79d38ff330d7d1f28485d2366de32", + Pk: "045b8175cfb6e7d479682a50b19241671906f706bd71e30d7e80fd5ff522c41bf0588735865a5faa121c3801b0b0581440bdde24b03dc4c4541df9555d15223e82", + PkX: "41389205596727393921445837404963099032198113370266717620546075917307049417712", + PkY: "40042424443779217635966540867474786311411229770852010943594459290130507251330", + KeyHash: "0x6f32373625e3d1f8f303196cbb78020ac2503acd1129e44b36b425781a9664ac", + Chain: insertedChain.Name, + } + insertedVrf, err = utils.UrlRequest[vrf.VrfModel](GO_URL+"/vrf", "POST", vrfInsertData) + if err != nil { + panic("failed to insert vrf") + } +} + +// remove all inserted data +func cleanup() { + _, err := utils.UrlRequest[vrf.VrfModel](GO_URL+"/vrf/"+insertedVrf.VrfKeyId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete vrf") + } + + _, err = utils.UrlRequest[reporter.ReporterModel](GO_URL+"/reporter/"+insertedReporter.ReporterId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete reporter") + } + + _, err = utils.UrlRequest[proxy.ProxyModel](GO_URL+"/proxy/"+insertedProxy.Id.String(), "DELETE", nil) + 
if err != nil { + panic("failed to delete proxy") + } + + _, err = utils.UrlRequest[listener.ListenerModel](GO_URL+"/listener/"+insertedListener.ListenerId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete listener") + } + + _, err = utils.UrlRequest[aggregate.AggregateModel](GO_URL+"/aggregate/"+insertedAggregate.AggregateId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete aggregate") + } + + _, err = utils.UrlRequest[data.DataResultModel](GO_URL+"/data/"+insertedData.DataId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete data") + } + + _, err = utils.UrlRequest[apierr.ErrorModel](GO_URL+"/error/"+insertedError.ERROR_ID.String(), "DELETE", nil) + if err != nil { + panic("failed to delete error") + } + + _, err = utils.UrlRequest[aggregator.AggregatorResultModel](GO_URL+"/aggregator/"+insertedAggregator.AggregatorId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete aggregator") + } + + _, err = utils.UrlRequest[feed.FeedModel](GO_URL+"/feed/"+insertedFeeds[0].FeedId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete feed") + } + + _, err = utils.UrlRequest[adapter.AdapterModel](GO_URL+"/adapter/"+insertedAdapter.AdapterId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete adapter") + } + + _, err = utils.UrlRequest[chain.ChainModel](GO_URL+"/chain/"+insertedChain.ChainId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete chain") + } + + _, err = utils.UrlRequest[service.ServiceModel](GO_URL+"/service/"+insertedService.ServiceId.String(), "DELETE", nil) + if err != nil { + panic("failed to delete service") + } +} diff --git a/go-api/service/controller.go b/go-api/service/controller.go new file mode 100644 index 000000000..37e266844 --- /dev/null +++ b/go-api/service/controller.go @@ -0,0 +1,78 @@ +package service + +import ( + "go-api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type 
ServiceModel struct { + ServiceId *utils.CustomInt64 `db:"service_id" json:"id"` + Name string `db:"name" json:"name" validate:"required"` +} + +type ServiceInsertModel struct { + Name string `db:"name" json:"name" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(ServiceInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ServiceModel](c, InsertService, map[string]any{"name": payload.Name}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + results, err := utils.QueryRows[ServiceModel](c, GetService, nil) + if err != nil { + panic(err) + } + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ServiceModel](c, GetServiceById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(ServiceInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[ServiceModel](c, UpdateServiceById, map[string]any{"id": id, "name": payload.Name}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[ServiceModel](c, DeleteServiceById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/service/queries.go b/go-api/service/queries.go new file mode 100644 index 000000000..ea8e962f1 --- /dev/null +++ b/go-api/service/queries.go @@ -0,0 +1,27 @@ +package service + +const ( + InsertService = ` + INSERT INTO services (name) VALUES (@name) RETURNING *; + ` + + GetService = ` + SELECT * FROM services; + ` + + GetServiceById = ` + SELECT * FROM services WHERE service_id = 
@id LIMIT 1; + ` + + GetServiceByName = ` + SELECT * FROM services WHERE name = @name LIMIT 1; + ` + + UpdateServiceById = ` + UPDATE services SET name = @name WHERE service_id = @id RETURNING *; + ` + + DeleteServiceById = ` + DELETE FROM services WHERE service_id = @id RETURNING *; + ` +) diff --git a/go-api/service/route.go b/go-api/service/route.go new file mode 100644 index 000000000..7e9fef712 --- /dev/null +++ b/go-api/service/route.go @@ -0,0 +1,15 @@ +package service + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router fiber.Router) { + service := router.Group("/service") + + service.Post("", insert) + service.Get("", get) + service.Get("/:id", getById) + service.Patch("/:id", updateById) + service.Delete("/:id", deleteById) +} diff --git a/go-api/tests/adapter_test.go b/go-api/tests/adapter_test.go new file mode 100644 index 000000000..0c4d13997 --- /dev/null +++ b/go-api/tests/adapter_test.go @@ -0,0 +1,113 @@ +package tests + +import ( + "encoding/json" + "fmt" + "go-api/adapter" + "go-api/feed" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestAdapter(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continuing without .env file") + } + + _adapterInsertData := _AdapterInsertModel{ + AdapterHash: "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + Name: "BTC-USD", + Decimals: 8, + } + + adapterInsertData := AdapterInsertModel{ + _AdapterInsertModel: _adapterInsertData, + Feeds: []feed.FeedInsertModel{ + { + Name: "Binance-BTC-USD-adapter", + Definition: json.RawMessage(`{ + "url": "https://api.binance.us/api/v3/ticker/price?symbol=BTCUSD", + "headers": { + "Content-Type": "application/json" + }, + "method": "GET", + "reducers": [ + { + "function": "PARSE", + "args": [ + "price" + ] + }, + { + "function": "POW10", + "args": 8 + }, + { + "function": "ROUND" + } + ] + }`), + }, + }, + } + + appConfig, _ 
:= utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + adapter.Routes(v1) + feed.Routes(v1) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]adapter.AdapterModel](app, "/api/v1/adapter", nil) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + adapterInsertResult, err := utils.PostRequest[adapter.AdapterModel](app, "/api/v1/adapter", adapterInsertData) + assert.Nil(t, err) + + // read all after insertion + readAllAfter, err := utils.GetRequest[[]adapter.AdapterModel](app, "/api/v1/adapter", nil) + assert.Nil(t, err) + totalAfter := len(readAllAfter) + assert.Less(t, totalBefore, totalAfter) + + // get single + singleReadResult, err := utils.GetRequest[adapter.AdapterModel](app, "/api/v1/adapter/"+adapterInsertResult.AdapterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, adapterInsertResult, singleReadResult, "should read single element") + + // hash + hashTestInsertData := adapterInsertData + hashTestInsertData.AdapterHash = "" + hashResult, err := utils.PostRequest[adapter.AdapterInsertModel](app, "/api/v1/adapter/hash?verify=false", hashTestInsertData) + assert.Nil(t, err) + assert.Equal(t, adapterInsertData.AdapterHash, hashResult.AdapterHash, "hash should be same") + + // delete by id + feeds, _ := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed", nil) + + for _, f := range feeds { + _, err = utils.DeleteRequest[feed.FeedModel](app, "/api/v1/feed/"+f.FeedId.String(), nil) + assert.Nil(t, err) + } + + deleteResult, err := utils.DeleteRequest[adapter.AdapterModel](app, "/api/v1/adapter/"+adapterInsertResult.AdapterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, deleteResult, "should be deleted") + + readAllResultAfterDeletion, err := utils.GetRequest[[]adapter.AdapterModel](app, "/api/v1/adapter", nil) + assert.Nil(t, err) + 
assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + +} diff --git a/go-api/tests/aggregate_test.go b/go-api/tests/aggregate_test.go new file mode 100644 index 000000000..0d2e9435f --- /dev/null +++ b/go-api/tests/aggregate_test.go @@ -0,0 +1,250 @@ +package tests + +import ( + "encoding/json" + "fmt" + "go-api/adapter" + "go-api/aggregate" + "go-api/aggregator" + "go-api/chain" + "go-api/feed" + "go-api/utils" + "testing" + "time" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +/* +sample data + +POST localhost:3111/api/v1/chain + +{ + "name":"aggregate-test-chain" +} + +POST localhost:3111/api/v1/adapter + +{ + "adapterHash": "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + "name": "BTC-USD", + "decimals": 8, + "feeds": [ + { + "name": "Binance-BTC-USD-adapter", + "definition": { + "url": "https://api.binance.us/api/v3/ticker/price?symbol=BTCUSD", + "headers": { + "Content-Type": "application/json" + }, + "method": "GET", + "reducers": [ + { + "function": "PARSE", + "args": [ + "price" + ] + }, + { + "function": "POW10", + "args": 8 + }, + { + "function": "ROUND" + } + ] + } + } + ] +} + +POST localhost:3111/api/v1/aggregator + +{ + "aggregatorHash": "0x9ca45583d7b9b061d9e8a20d6a874fcfba50c7a9dbc9c65c3792b4ef0b31e7b9", + "active": false, + "name": "BTC-USD", + "address": "0x222", + "heartbeat": 10000, + "threshold": 0.04, + "absoluteThreshold": 0.1, + "adapterHash": "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + "chain": "aggregate-test-chain" +} +*/ + +func TestAggregate(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continuing without .env file") + } + + insertChain := chain.ChainInsertModel{Name: "aggregate-test-chain"} + + _adapterInsertData := _AdapterInsertModel{ + AdapterHash: "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + Name: "BTC-USD", + Decimals: 8, + } + + adapterInsertData := 
AdapterInsertModel{ + _AdapterInsertModel: _adapterInsertData, + Feeds: []feed.FeedInsertModel{ + { + Name: "Binance-BTC-USD-adapter", + Definition: json.RawMessage(`{ + "url": "https://api.binance.us/api/v3/ticker/price?symbol=BTCUSD", + "headers": { + "Content-Type": "application/json" + }, + "method": "GET", + "reducers": [ + { + "function": "PARSE", + "args": [ + "price" + ] + }, + { + "function": "POW10", + "args": 8 + }, + { + "function": "ROUND" + } + ] + }`), + }, + }, + } + + // aggregatorInsertData := aggregator.AggregatorInsertModel{ + // AGGREGATOR_HASH: "0x9ca45583d7b9b061d9e8a20d6a874fcfba50c7a9dbc9c65c3792b4ef0b31e7b9", + // ACTIVE: false, + // NAME: "BTC-USD", + // ADDRESS: "0x222", + // HEARTBEAT: 10000, + // THRESHOLD: 0.04, + // ABSOLUTE_THRESHOLD: 0.1, + // ADAPTER_HASH: _adapterInsertData.ADAPTER_HASH, + // CHAIN: insertChain.NAME, + // FETCHER_TYPE: 0, + // } + + aggregatorInsertData := map[string]interface{}{ + "aggregatorHash": "0x9ca45583d7b9b061d9e8a20d6a874fcfba50c7a9dbc9c65c3792b4ef0b31e7b9", + "active": false, + "name": "BTC-USD", + "address": "0x222", + "heartbeat": 10000, + "threshold": 0.04, + "absoluteThreshold": 0.1, + "adapterHash": _adapterInsertData.AdapterHash, + "chain": insertChain.Name, + "fetcherType": 0, + } + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + + chain.Routes(v1) + adapter.Routes(v1) + aggregator.Routes(v1) + aggregate.Routes(v1) + feed.Routes(v1) + + // insert chain, adapter, and aggregator before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + + adapterInsertResult, err := utils.PostRequest[adapter.AdapterModel](app, "/api/v1/adapter", adapterInsertData) + assert.Nil(t, err) + + aggregatorInsertResult, err := utils.PostRequest[aggregator.AggregatorResultModel](app, 
"/api/v1/aggregator", aggregatorInsertData) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]aggregate.AggregateModel](app, "/api/v1/aggregate", nil) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + now := time.Now().Truncate(time.Second) + + // insert + insertValue := utils.CustomInt64(10) + aggregateInsertData := aggregate.AggregateInsertModel{ + Timestamp: &utils.CustomDateTime{Time: now}, + AggregatorId: aggregatorInsertResult.AggregatorId, + Value: &insertValue} + wrappedAggregateInsertData := aggregate.WrappedInsertModel{Data: aggregateInsertData} + + aggregateInsertResult, err := utils.PostRequest[aggregate.AggregateModel](app, "/api/v1/aggregate", wrappedAggregateInsertData) + assert.Nil(t, err) + + // read all after insertion + readAllAfter, err := utils.GetRequest[[]aggregate.AggregateModel](app, "/api/v1/aggregate", nil) + assert.Nil(t, err) + totalAfter := len(readAllAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[aggregate.AggregateModel](app, "/api/v1/aggregate/"+aggregateInsertResult.AggregateId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, aggregateInsertResult, singleReadResult, "should get single element") + + // read latest by hash + latestByHashResult, err := utils.GetRequest[aggregate.AggregateModel](app, "/api/v1/aggregate/hash/"+aggregatorInsertData["aggregatorHash"].(string)+"/latest", nil) + assert.Nil(t, err) + assert.Equalf(t, aggregateInsertResult, latestByHashResult, "should get latest by hash") + + // read latest by aggregatorId + latestByAggregatorIdResult, err := utils.GetRequest[aggregate.AggregateRedisValueModel](app, "/api/v1/aggregate/id/"+aggregatorInsertResult.AggregatorId.String()+"/latest", nil) + assert.Nil(t, err) + fmt.Println(latestByAggregatorIdResult.Timestamp) + fmt.Println(latestByAggregatorIdResult.Value) + mtmp, _ := json.Marshal(latestByAggregatorIdResult) + fmt.Println(string(mtmp)) 
+ assert.Equalf(t, aggregateInsertResult.Timestamp, latestByAggregatorIdResult.Timestamp, "should get latest by aggregatorId") + assert.Equalf(t, aggregateInsertResult.Value, latestByAggregatorIdResult.Value, "should get latest by aggregatorId") + + // should update by id + updateValue := utils.CustomInt64(20) + aggregateUpdateData := aggregateInsertData + aggregateUpdateData.Value = &updateValue + updateResult, err := utils.PatchRequest[aggregate.AggregateModel](app, "/api/v1/aggregate/"+aggregateInsertResult.AggregateId.String(), aggregateUpdateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[aggregate.AggregateModel](app, "/api/v1/aggregate/"+aggregateInsertResult.AggregateId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, updateResult, singleReadResult, "should get single element") + + // should delete by id + deleteResult, err := utils.DeleteRequest[aggregate.AggregateModel](app, "/api/v1/aggregate/"+aggregateInsertResult.AggregateId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, updateResult, deleteResult, "should be deleted") + readAllAfterDeletion, err := utils.GetRequest[[]aggregate.AggregateModel](app, "/api/v1/aggregate", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllAfterDeletion), totalAfter) + + // clean up + feeds, _ := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed", nil) + for _, f := range feeds { + _, err = utils.DeleteRequest[feed.FeedModel](app, "/api/v1/feed/"+f.FeedId.String(), nil) + assert.Nil(t, err) + } + + _, err = utils.DeleteRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator/"+aggregatorInsertResult.AggregatorId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[adapter.AdapterModel](app, "/api/v1/adapter/"+adapterInsertResult.AdapterId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) + +} diff --git a/go-api/tests/aggregator_test.go 
b/go-api/tests/aggregator_test.go new file mode 100644 index 000000000..ff47b1ac1 --- /dev/null +++ b/go-api/tests/aggregator_test.go @@ -0,0 +1,160 @@ +package tests + +import ( + "encoding/json" + "fmt" + "go-api/adapter" + "go-api/aggregator" + "go-api/chain" + "go-api/feed" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestAggregator(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continuing without .env file") + } + insertChain := chain.ChainInsertModel{Name: "aggregator-test-chain"} + + _adapterInsertData := _AdapterInsertModel{ + AdapterHash: "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + Name: "BTC-USD", + Decimals: 8, + } + + adapterInsertData := AdapterInsertModel{ + _AdapterInsertModel: _adapterInsertData, + Feeds: []feed.FeedInsertModel{ + { + Name: "Binance-BTC-USD-adapter", + Definition: json.RawMessage(`{ + "url": "https://api.binance.us/api/v3/ticker/price?symbol=BTCUSD", + "headers": { + "Content-Type": "application/json" + }, + "method": "GET", + "reducers": [ + { + "function": "PARSE", + "args": [ + "price" + ] + }, + { + "function": "POW10", + "args": 8 + }, + { + "function": "ROUND" + } + ] + }`), + }, + }, + } + + aggregatorInsertData := map[string]interface{}{ + "aggregatorHash": "0x9ca45583d7b9b061d9e8a20d6a874fcfba50c7a9dbc9c65c3792b4ef0b31e7b9", + "active": false, + "name": "BTC-USD", + "address": "0x222", + "heartbeat": 10000, + "threshold": 0.04, + "absoluteThreshold": 0.1, + "adapterHash": _adapterInsertData.AdapterHash, + "chain": insertChain.Name, + "fetcherType": 0, + } + + customTrue := utils.CustomBool(true) + aggregatorUpdateData := aggregator.WrappedUpdateModel{ + Data: aggregator.AggregatorUpdateModel{ + Active: &customTrue, + Chain: insertChain.Name, + }, + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + 
+ defer pgxClient.Close() + defer redisClient.Close() + + v1 := app.Group("/api/v1") + + chain.Routes(v1) + adapter.Routes(v1) + aggregator.Routes(v1) + feed.Routes(v1) + + // insert chain, adapter (setup) + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + + adapterInsertResult, err := utils.PostRequest[adapter.AdapterModel](app, "/api/v1/adapter", adapterInsertData) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]aggregator.AggregatorResultModel](app, "/api/v1/aggregator?chain="+insertChain.Name, nil) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + aggregatorInsertResult, err := utils.PostRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator", aggregatorInsertData) + assert.Nil(t, err) + + // read all after insertion + readAllAfter, err := utils.GetRequest[[]aggregator.AggregatorResultModel](app, "/api/v1/aggregator?chain="+insertChain.Name, nil) + assert.Nil(t, err) + totalAfter := len(readAllAfter) + assert.Less(t, totalBefore, totalAfter) + + // read by hash and chain + singleReadResult, err := utils.GetRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator/"+aggregatorInsertData["aggregatorHash"].(string)+"/"+insertChain.Name, nil) + assert.Nil(t, err) + // FIXME: singleReadResult has more detailed info, should check differently + assert.Equalf(t, aggregatorInsertResult.AggregatorId, singleReadResult.AggregatorId, "should read single element") + + // update by id + patchResult, err := utils.PatchRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator/"+aggregatorInsertResult.AggregatorHash, aggregatorUpdateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator/"+aggregatorInsertData["aggregatorHash"].(string)+"/"+insertChain.Name, nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, singleReadResult, 
"should be patched") + + // hash + hashTestInsertData, _ := utils.DeepCopyMap(aggregatorInsertData) + hashTestInsertData["aggregatorHash"] = "" + hashResult, err := utils.PostRequest[aggregator.AggregatorHashComputeInputModel](app, "/api/v1/aggregator/hash?verify=false", hashTestInsertData) + assert.Nil(t, err) + assert.Equalf(t, aggregatorInsertData["aggregatorHash"].(string), hashResult.AggregatorHash, "hash should be same") + + // delete by id + deleteResult, err := utils.DeleteRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator/"+aggregatorInsertResult.AggregatorId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + readAllResultAfterDeletion, err := utils.GetRequest[[]aggregator.AggregatorResultModel](app, "/api/v1/aggregator?chain="+insertChain.Name, nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + + // cleanup + feeds, _ := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed", nil) + for _, f := range feeds { + _, err = utils.DeleteRequest[feed.FeedModel](app, "/api/v1/feed/"+f.FeedId.String(), nil) + assert.Nil(t, err) + } + + _, err = utils.DeleteRequest[adapter.AdapterModel](app, "/api/v1/adapter/"+adapterInsertResult.AdapterId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) +} diff --git a/go-api/tests/apierr_test.go b/go-api/tests/apierr_test.go new file mode 100644 index 000000000..9988dc01a --- /dev/null +++ b/go-api/tests/apierr_test.go @@ -0,0 +1,77 @@ +package tests + +import ( + "fmt" + + "go-api/apierr" + "go-api/utils" + "testing" + "time" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestApiErr(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + now := utils.CustomDateTime{Time: time.Now()} 
+ insertData := apierr.ErrorInsertModel{ + RequestId: "66649924661314489704239946349158829048302840686075232939396730072454733114998", + Timestamp: &now, + Code: "10020", + Name: "MissingKeyInJson", + Stack: `MissingKeyInJson + at wrapper (file:///app/dist/worker/reducer.js:19:23) + at file:///app/dist/utils.js:11:61 + at Array.reduce () + at file:///app/dist/utils.js:11:44 + at processRequest (file:///app/dist/worker/request-response.js:58:34) + at process.processTicksAndRejections (node:internal/process/task_queues:95:5) + at async Worker.wrapper [as processFn] (file:///app/dist/worker/request-response.js:27:25) + at async Worker.processJob (/app/node_modules/bullmq/dist/cjs/classes/worker.js:339:28) + at async Worker.retryIfFailed (/app/node_modules/bullmq/dist/cjs/classes/worker.js:513:24)`, + } + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + apierr.Routes(v1) + + // read all before insertion + readAllResultBefore, err := utils.GetRequest[[]apierr.ErrorModel](app, "/api/v1/error", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[apierr.ErrorModel](app, "/api/v1/error", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]apierr.ErrorModel](app, "/api/v1/error", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[apierr.ErrorModel](app, "/api/v1/error/"+insertResult.ERROR_ID.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted service") + + // delete + deleteResult, err := utils.DeleteRequest[apierr.ErrorModel](app, "/api/v1/error/"+insertResult.ERROR_ID.String(), nil) + assert.Nil(t, err) + 
assert.Equalf(t, insertResult, deleteResult, "should be deleted") + + readAllResultAfterDeletion, err := utils.GetRequest[[]apierr.ErrorModel](app, "/api/v1/error", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/go-api/tests/chain_test.go b/go-api/tests/chain_test.go new file mode 100644 index 000000000..d29874293 --- /dev/null +++ b/go-api/tests/chain_test.go @@ -0,0 +1,69 @@ +package tests + +import ( + "fmt" + "go-api/chain" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestChain(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertData = chain.ChainInsertModel{Name: "cypress"} + var updateData = chain.ChainInsertModel{Name: "cypress2"} + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + chain.Routes(v1) + + // read all before insertion + readAllResultBefore, err := utils.GetRequest[[]chain.ChainModel](app, "/api/v1/chain", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]chain.ChainModel](app, "/api/v1/chain", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[chain.ChainModel](app, "/api/v1/chain/"+insertResult.ChainId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted chain") + + // patch + patchResult, err := utils.PatchRequest[chain.ChainModel](app, 
"/api/v1/chain/"+insertResult.ChainId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[chain.ChainModel](app, "/api/v1/chain/"+insertResult.ChainId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+insertResult.ChainId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]chain.ChainModel](app, "/api/v1/chain", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/go-api/tests/data_test.go b/go-api/tests/data_test.go new file mode 100644 index 000000000..d090d4701 --- /dev/null +++ b/go-api/tests/data_test.go @@ -0,0 +1,162 @@ +package tests + +import ( + "encoding/json" + "fmt" + "go-api/adapter" + "go-api/aggregator" + "go-api/chain" + "go-api/data" + "go-api/feed" + "go-api/utils" + "testing" + "time" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestData(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + insertChain := chain.ChainInsertModel{Name: "data-test-chain"} + + _adapterInsertData := _AdapterInsertModel{ + AdapterHash: "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + Name: "BTC-USD", + Decimals: 8, + } + + adapterInsertData := AdapterInsertModel{ + _AdapterInsertModel: _adapterInsertData, + Feeds: []feed.FeedInsertModel{ + { + Name: "Binance-BTC-USD-adapter", + Definition: json.RawMessage(`{ + "url": "https://api.binance.us/api/v3/ticker/price?symbol=BTCUSD", + "headers": { + "Content-Type": "application/json" + }, + "method": "GET", + "reducers": [ + { + "function": "PARSE", + "args": [ + "price" + ] + }, + { + "function": "POW10", + 
"args": 8 + }, + { + "function": "ROUND" + } + ] + }`), + }, + }, + } + + aggregatorInsertData := map[string]interface{}{ + "aggregatorHash": "0x9ca45583d7b9b061d9e8a20d6a874fcfba50c7a9dbc9c65c3792b4ef0b31e7b9", + "active": false, + "name": "BTC-USD", + "address": "0x222", + "heartbeat": 10000, + "threshold": 0.04, + "absoluteThreshold": 0.1, + "adapterHash": _adapterInsertData.AdapterHash, + "chain": insertChain.Name, + "fetcherType": 0, + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + + chain.Routes(v1) + adapter.Routes(v1) + aggregator.Routes(v1) + data.Routes(v1) + feed.Routes(v1) + + // insert chain, adapter, and aggregator before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + + adapterInsertResult, err := utils.PostRequest[adapter.AdapterModel](app, "/api/v1/adapter", adapterInsertData) + assert.Nil(t, err) + + aggregatorInsertResult, err := utils.PostRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator", aggregatorInsertData) + assert.Nil(t, err) + + insertedFeeds, err := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed/adapter/"+adapterInsertResult.AdapterId.String(), nil) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]data.DataResultModel](app, "/api/v1/data", nil) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + insertData := map[string]interface{}{ + "data": []map[string]interface{}{ + { + "aggregatorId": aggregatorInsertResult.AggregatorId, + "timestamp": time.Now().UTC().Format(utils.RFC3339Milli), + "value": 2241772466578, + "feedId": insertedFeeds[0].FeedId, + }, + }, + } + + count, err := utils.PostRequest[struct { + COUNT int `json:"count"` + }](app, "/api/v1/data", insertData) + assert.Nil(t, err) + assert.Equalf(t, 1, 
count.COUNT, "1 insert") + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]data.DataResultModel](app, "/api/v1/data", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + lastDataElement := readAllResultAfter[len(readAllResultAfter)-1] + singleReadResult, err := utils.GetRequest[data.DataResultModel](app, "/api/v1/data/"+lastDataElement.DataId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, lastDataElement, singleReadResult, "should get single element") + + // delete by id + insertedData, err := utils.GetRequest[[]data.DataResultModel](app, "/api/v1/data/feed/"+insertedFeeds[0].FeedId.String(), nil) + assert.Nil(t, err) + deletedData, err := utils.DeleteRequest[data.DataResultModel](app, "/api/v1/data/"+insertedData[0].DataId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertedData[0], deletedData, "should delete by id") + + // clean up + + feeds, _ := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed", nil) + for _, f := range feeds { + _, err = utils.DeleteRequest[feed.FeedModel](app, "/api/v1/feed/"+f.FeedId.String(), nil) + assert.Nil(t, err) + } + + _, err = utils.DeleteRequest[aggregator.AggregatorResultModel](app, "/api/v1/aggregator/"+aggregatorInsertResult.AggregatorId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[adapter.AdapterModel](app, "/api/v1/adapter/"+adapterInsertResult.AdapterId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) + +} diff --git a/go-api/tests/feed_test.go b/go-api/tests/feed_test.go new file mode 100644 index 000000000..7a38afdb7 --- /dev/null +++ b/go-api/tests/feed_test.go @@ -0,0 +1,117 @@ +package tests + +import ( + "encoding/json" + "fmt" + "go-api/adapter" + "go-api/feed" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + 
"github.com/stretchr/testify/assert" +) + +func containsFeed(arr []feed.FeedModel, target feed.FeedModel) bool { + for _, f := range arr { + if f.FeedId == target.FeedId { + return true + } + } + return false +} + +func TestFeed(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + _adapterInsertData := _AdapterInsertModel{ + AdapterHash: "0xbb555a249d01133784fa04c608ce03c129f73f2a1ef7473d0cfffdc4bcba794e", + Name: "BTC-USD", + Decimals: 8, + } + adapterInsertData := AdapterInsertModel{ + _AdapterInsertModel: _adapterInsertData, + Feeds: []feed.FeedInsertModel{ + { + Name: "Binance-BTC-USD-adapter", + Definition: json.RawMessage(`{ + "url": "https://api.binance.us/api/v3/ticker/price?symbol=BTCUSD", + "headers": { + "Content-Type": "application/json" + }, + "method": "GET", + "reducers": [ + { + "function": "PARSE", + "args": [ + "price" + ] + }, + { + "function": "POW10", + "args": 8 + }, + { + "function": "ROUND" + } + ] + }`), + }, + }, + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + + adapter.Routes(v1) + feed.Routes(v1) + + // read all before insert + readAllResult, err := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed", nil) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert adapter which adds feed + adapterInsertResult, err := utils.PostRequest[adapter.AdapterModel](app, "/api/v1/adapter", adapterInsertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + lastElement := readAllResultAfter[len(readAllResultAfter)-1] + singleReadResult, err := 
utils.GetRequest[feed.FeedModel](app, "/api/v1/feed/"+lastElement.FeedId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, lastElement, singleReadResult, "should get single element") + + // delete added feeds + for _, f := range readAllResultAfter { + if !containsFeed(readAllResult, f) { + deletedFeed, err := utils.DeleteRequest[feed.FeedModel](app, "/api/v1/feed/"+f.FeedId.String(), nil) + assert.Nil(t, err) + assert.NotNil(t, deletedFeed) + } + } + + // delete adapter (cleanup) + _, err = utils.DeleteRequest[adapter.AdapterModel](app, "/api/v1/adapter/"+adapterInsertResult.AdapterId.String(), nil) + assert.Nil(t, err) + + // read all after deletion and cleanup + readAllResultAfterDeletion, err := utils.GetRequest[[]feed.FeedModel](app, "/api/v1/feed", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/go-api/tests/listener_test.go b/go-api/tests/listener_test.go new file mode 100644 index 000000000..94fd8ead8 --- /dev/null +++ b/go-api/tests/listener_test.go @@ -0,0 +1,98 @@ +package tests + +import ( + "fmt" + "go-api/chain" + "go-api/listener" + "go-api/service" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestListener(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertChain = chain.ChainInsertModel{Name: "listener-test-chain"} + var InsertService = service.ServiceInsertModel{Name: "listener-test-service"} + + var insertData = listener.ListenerInsertModel{ + Address: "0xa", + EventName: "new_round(uint, uint80)", + Chain: "listener-test-chain", + Service: "listener-test-service", + } + + var updateData = listener.ListenerUpdateModel{ + Address: "0x1", + EventName: "new_round_v2(uint, uint80)", + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer 
pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + + chain.Routes(v1) + service.Routes(v1) + listener.Routes(v1) + + // insert chain and service before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + serviceInsertResult, err := utils.PostRequest[service.ServiceModel](app, "/api/v1/service", InsertService) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]listener.ListenerModel](app, "/api/v1/listener", map[string]any{"chain": "listener-test-chain", "service": "listener-test-service"}) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + insertResult, err := utils.PostRequest[listener.ListenerModel](app, "/api/v1/listener", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]listener.ListenerModel](app, "/api/v1/listener", map[string]any{"chain": "listener-test-chain", "service": "listener-test-service"}) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted reporter") + + // patch + patchResult, err := utils.PatchRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[listener.ListenerModel](app, "/api/v1/listener/"+insertResult.ListenerId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, 
deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]listener.ListenerModel](app, "/api/v1/listener", map[string]any{"chain": "listener-test-chain", "service": "listener-test-service"}) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + + // delete chain and service (cleanup) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[service.ServiceModel](app, "/api/v1/service/"+serviceInsertResult.ServiceId.String(), nil) + assert.Nil(t, err) +} diff --git a/go-api/tests/models.go b/go-api/tests/models.go new file mode 100644 index 000000000..f5c6b4baa --- /dev/null +++ b/go-api/tests/models.go @@ -0,0 +1,19 @@ +package tests + +import ( + "go-api/feed" +) + +// FIXME: redeclares structs that aren't accessable from test +// had to define again since _AdapterInsertModel isn't exported struct + +type AdapterInsertModel struct { + _AdapterInsertModel + Feeds []feed.FeedInsertModel `json:"feeds"` +} + +type _AdapterInsertModel struct { + AdapterHash string `db:"adapter_hash" json:"adapterHash" validate:"required"` + Name string `db:"name" json:"name" validate:"required"` + Decimals int `db:"decimals" json:"decimals" validate:"required"` +} diff --git a/go-api/tests/proxy_test.go b/go-api/tests/proxy_test.go new file mode 100644 index 000000000..97ee42dbf --- /dev/null +++ b/go-api/tests/proxy_test.go @@ -0,0 +1,80 @@ +package tests + +import ( + "fmt" + "go-api/proxy" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestProxy(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + var location = "kr" + var portNumber = utils.CustomInt32(5000) + var insertData = proxy.ProxyInsertModel{ + Protocol: "http", + Host: 
"127.0.0.1", + Port: &portNumber, + } + + var updateData = proxy.ProxyInsertModel{ + Protocol: "http", + Host: "127.0.0.1", + Port: &portNumber, + Location: &location, + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + proxy.Routes(v1) + + // read all before insertion + readAllResultBefore, err := utils.GetRequest[[]proxy.ProxyModel](app, "/api/v1/proxy", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[proxy.ProxyModel](app, "/api/v1/proxy", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]proxy.ProxyModel](app, "/api/v1/proxy", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted proxy") + + // patch + patchResult, err := utils.PatchRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[proxy.ProxyModel](app, "/api/v1/proxy/"+insertResult.Id.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]proxy.ProxyModel](app, "/api/v1/proxy", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git 
a/go-api/tests/reporter_test.go b/go-api/tests/reporter_test.go new file mode 100644 index 000000000..2e1f583e4 --- /dev/null +++ b/go-api/tests/reporter_test.go @@ -0,0 +1,100 @@ +package tests + +import ( + "fmt" + "go-api/chain" + "go-api/reporter" + "go-api/service" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestReporter(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertChain = chain.ChainInsertModel{Name: "reporter-test-chain"} + var insertService = service.ServiceInsertModel{Name: "reporter-test-service"} + + var insertData = reporter.ReporterInsertModel{ + Address: "0xa", + PrivateKey: "0xb", + OracleAddress: "0xc", + Chain: "reporter-test-chain", + Service: "reporter-test-service", + } + + var updateData = reporter.ReporterUpdateModel{ + Address: "0x1", + PrivateKey: "0x2", + OracleAddress: "0x3", + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + + chain.Routes(v1) + service.Routes(v1) + reporter.Routes(v1) + + // insert chain and service before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + serviceInsertResult, err := utils.PostRequest[service.ServiceModel](app, "/api/v1/service", insertService) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]reporter.ReporterModel](app, "/api/v1/reporter", map[string]any{"chain": "reporter-test-chain", "service": "reporter-test-service"}) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + insertResult, err := utils.PostRequest[reporter.ReporterModel](app, "/api/v1/reporter", insertData) + assert.Nil(t, err) + + // read all after insertion + 
readAllResultAfter, err := utils.GetRequest[[]reporter.ReporterModel](app, "/api/v1/reporter", map[string]any{"chain": "reporter-test-chain", "service": "reporter-test-service"}) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should be inserted") + + // patch + patchResult, err := utils.PatchRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[reporter.ReporterModel](app, "/api/v1/reporter/"+insertResult.ReporterId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]reporter.ReporterModel](app, "/api/v1/reporter", map[string]any{"chain": "reporter-test-chain", "service": "reporter-test-service"}) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + + // delete chain and service (cleanup) + _, err = utils.DeleteRequest[chain.ChainModel](app, "/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) + _, err = utils.DeleteRequest[service.ServiceModel](app, "/api/v1/service/"+serviceInsertResult.ServiceId.String(), nil) + assert.Nil(t, err) +} diff --git a/go-api/tests/service_test.go b/go-api/tests/service_test.go new file mode 100644 index 000000000..59fd70f44 --- /dev/null +++ b/go-api/tests/service_test.go @@ -0,0 +1,69 @@ +package tests + +import ( + "fmt" + 
"go-api/service" + "go-api/utils" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestService(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertData = service.ServiceInsertModel{Name: "SERVICE_TEST"} + var updateData = service.ServiceInsertModel{Name: "SERVICE_TEST_2"} + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + + defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + service.Routes(v1) + + // read all before insertion + readAllResultBefore, err := utils.GetRequest[[]service.ServiceModel](app, "/api/v1/service", nil) + assert.Nil(t, err) + totalBefore := len(readAllResultBefore) + + // insert + insertResult, err := utils.PostRequest[service.ServiceModel](app, "/api/v1/service", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]service.ServiceModel](app, "/api/v1/service", nil) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted service") + + // patch + patchResult, err := utils.PatchRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[service.ServiceModel](app, "/api/v1/service/"+insertResult.ServiceId.String(), nil) + 
assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]service.ServiceModel](app, "/api/v1/service", nil) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) +} diff --git a/go-api/tests/vrf_test.go b/go-api/tests/vrf_test.go new file mode 100644 index 000000000..10a8192df --- /dev/null +++ b/go-api/tests/vrf_test.go @@ -0,0 +1,95 @@ +package tests + +import ( + "fmt" + "go-api/chain" + "go-api/utils" + "go-api/vrf" + "testing" + + "github.com/joho/godotenv" + "github.com/stretchr/testify/assert" +) + +func TestVrf(t *testing.T) { + err := godotenv.Load("../.env") + if err != nil { + fmt.Print("env file is not found, continueing without .env file") + } + + var insertChain = chain.ChainInsertModel{Name: "vrf-test-chain"} + + var insertData = vrf.VrfInsertModel{ + Sk: "ebeb5229570725793797e30a426d7ef8aca79d38ff330d7d1f28485d2366de32", + Pk: "045b8175cfb6e7d479682a50b19241671906f706bd71e30d7e80fd5ff522c41bf0588735865a5faa121c3801b0b0581440bdde24b03dc4c4541df9555d15223e82", + PkX: "41389205596727393921445837404963099032198113370266717620546075917307049417712", + PkY: "40042424443779217635966540867474786311411229770852010943594459290130507251330", + KeyHash: "0x6f32373625e3d1f8f303196cbb78020ac2503acd1129e44b36b425781a9664ac", + Chain: "vrf-test-chain", + } + + var updateData = vrf.VrfUpdateModel{ + Sk: "ebeb5229570725793797e30a426d7ef8aca79d38ff330d7d1f28485d2366de32", + Pk: "045b8175cfb6e7d479682a50b19241671906f706bd71e30d7e80fd5ff522c41bf0588735865a5faa121c3801b0b0581440bdde24b03dc4c4541df9555d15223e82", + PkX: "41389205596727393921445837404963099032198113370266717620546075917307049417712", + PkY: "40042424443779217635966540867474786311411229770852010943594459290130507251330", + KeyHash: "0x", + } + + appConfig, _ := utils.Setup() + + pgxClient := appConfig.Postgres + redisClient := appConfig.Redis + app := appConfig.App + 
+ defer pgxClient.Close() + defer redisClient.Close() + v1 := app.Group("/api/v1") + vrf.Routes(v1) + chain.Routes(v1) + + // insert chain before test + chainInsertResult, err := utils.PostRequest[chain.ChainModel](app, "/api/v1/chain", insertChain) + assert.Nil(t, err) + + // read all before insertion + readAllResult, err := utils.GetRequest[[]vrf.VrfModel](app, "/api/v1/vrf", map[string]any{"chain": "vrf-test-chain"}) + assert.Nil(t, err) + totalBefore := len(readAllResult) + + // insert + insertResult, err := utils.PostRequest[vrf.VrfModel](app, "/api/v1/vrf", insertData) + assert.Nil(t, err) + + // read all after insertion + readAllResultAfter, err := utils.GetRequest[[]vrf.VrfModel](app, "/api/v1/vrf", map[string]any{"chain": "vrf-test-chain"}) + assert.Nil(t, err) + totalAfter := len(readAllResultAfter) + assert.Less(t, totalBefore, totalAfter) + + // read single + singleReadResult, err := utils.GetRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, insertResult, singleReadResult, "should get inserted vrf") + + // patch + patchResult, err := utils.PatchRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), updateData) + assert.Nil(t, err) + singleReadResult, err = utils.GetRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, singleReadResult, patchResult, "should be patched") + + // delete + deleteResult, err := utils.DeleteRequest[vrf.VrfModel](app, "/api/v1/vrf/"+insertResult.VrfKeyId.String(), nil) + assert.Nil(t, err) + assert.Equalf(t, patchResult, deleteResult, "should be deleted") + + // read all after delete + readAllResultAfterDeletion, err := utils.GetRequest[[]vrf.VrfModel](app, "/api/v1/vrf", map[string]any{"chain": "vrf-test-chain"}) + assert.Nil(t, err) + assert.Less(t, len(readAllResultAfterDeletion), totalAfter) + + // delete chain (cleanup) + _, err = utils.DeleteRequest[chain.ChainModel](app, 
"/api/v1/chain/"+chainInsertResult.ChainId.String(), nil) + assert.Nil(t, err) +} diff --git a/go-api/utils/custom_types.go b/go-api/utils/custom_types.go new file mode 100644 index 000000000..eb987b8ff --- /dev/null +++ b/go-api/utils/custom_types.go @@ -0,0 +1,234 @@ +package utils + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" + "time" +) + +// float8 in postgresql +// json return type: float +type CustomFloat float64 + +// boolean in postgresql +// json return type: boolean +type CustomBool bool + +// int4 in postgresql +// json return type: number +type CustomInt32 int32 + +// int8 and bigint in postgresql +// json return type: string +type CustomInt64 int64 + +type CustomDateTime struct { + time.Time +} + +const RFC3339Milli = "2006-01-02T15:04:05.000Z07:00" + +func (cf *CustomFloat) MarshalJSON() ([]byte, error) { + return json.Marshal(*cf) +} + +func (cf *CustomFloat) UnmarshalJSON(data []byte) error { + var value interface{} + if err := json.Unmarshal(data, &value); err != nil { + return err + } + + switch v := value.(type) { + case float64: + *cf = CustomFloat(v) + case float32: + *cf = CustomFloat(float64(v)) + case int: + *cf = CustomFloat(float64(v)) + case string: + converted, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + *cf = CustomFloat(converted) + default: + return fmt.Errorf("unexpected type for CustomFloat: %T", value) + + } + return nil +} + +func (cb *CustomBool) MarshalJSON() ([]byte, error) { + return json.Marshal(*cb) +} + +func (cb *CustomBool) UnmarshalJSON(data []byte) error { + var value interface{} + if err := json.Unmarshal(data, &value); err != nil { + return err + } + + switch v := value.(type) { + case bool: + *cb = CustomBool(v) + case string: + converted, err := strconv.ParseBool(v) + if err != nil { + return err + } + *cb = CustomBool(converted) + default: + return fmt.Errorf("unexpected type for CustomBoolean: %T", value) + } + return nil +} + +func (ci_32 *CustomInt32) MarshalJSON() 
([]byte, error) { + return json.Marshal(*ci_32) +} + +func (ci_32 *CustomInt32) UnmarshalJSON(data []byte) error { + var value interface{} + if err := json.Unmarshal(data, &value); err != nil { + return err + } + + switch v := value.(type) { + case int32: + *ci_32 = CustomInt32(v) + case int64: + *ci_32 = CustomInt32(int32(v)) + case int: + *ci_32 = CustomInt32(int32(v)) + case float64: + *ci_32 = CustomInt32(int32(v)) + case float32: + *ci_32 = CustomInt32(int32(v)) + case string: + if v == "" { + *ci_32 = CustomInt32(0) + } else { + converted, err := strconv.ParseInt(v, 10, 32) + if err != nil { + return err + } + *ci_32 = CustomInt32(converted) + } + + default: + return fmt.Errorf("unexpected type for customInt32: %T", value) + } + return nil +} + +func (ci_64 CustomInt64) String() string { + return strconv.FormatInt(int64(ci_64), 10) +} + +func (ci_64 *CustomInt64) MarshalJSON() ([]byte, error) { + + return json.Marshal(ci_64.String()) +} + +func (ci_64 *CustomInt64) UnmarshalJSON(data []byte) error { + var value interface{} + if err := json.Unmarshal(data, &value); err != nil { + return err + } + + switch v := value.(type) { + case int64: + *ci_64 = CustomInt64(v) + case int32: + *ci_64 = CustomInt64(int64(v)) + case int: + *ci_64 = CustomInt64(int64(v)) + case float64: + *ci_64 = CustomInt64(int64(v)) + case float32: + *ci_64 = CustomInt64(int64(v)) + case string: + if v == "" { + *ci_64 = CustomInt64(0) + } else { + converted, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + *ci_64 = CustomInt64(converted) + } + + default: + return fmt.Errorf("unexpected type for CustomInt64: %T", value) + } + return nil +} + +func (cdt CustomDateTime) String() string { + utcTime := cdt.Time.UTC() + return utcTime.Format(RFC3339Milli) +} + +func (cdt *CustomDateTime) MarshalJSON() ([]byte, error) { + return json.Marshal(cdt.String()) +} + +func (cdt *CustomDateTime) Scan(src interface{}) error { + switch v := src.(type) { + case time.Time: + cdt.Time = v + case string: 
+ v = strings.Replace(v, "GMT", "UTC", -1) + + if err := tryParsingRFC3339Milli(v, cdt); err != nil { + if err := tryParsingRFC3339(v, cdt); err != nil { + return fmt.Errorf("unexpected dateTime format: %s", v) + } + } + default: + return fmt.Errorf("unexpected type for CustomDateTime: %T", src) + } + return nil +} + +func (cdt *CustomDateTime) UnmarshalJSON(data []byte) error { + var value interface{} + if err := json.Unmarshal(data, &value); err != nil { + return err + } + + switch v := value.(type) { + case time.Time: + cdt.Time = v + case string: + v = strings.Replace(v, "GMT", "UTC", -1) + + if err := tryParsingRFC3339Milli(v, cdt); err != nil { + if err := tryParsingRFC3339(v, cdt); err != nil { + return fmt.Errorf("unexpected dateTime format: %s", v) + } + } + default: + return fmt.Errorf("unexpected type for CustomDateTime: %T", value) + } + return nil +} + +// Recommended dateTime format which matches output format +func tryParsingRFC3339Milli(v string, cdt *CustomDateTime) error { + converted, err := time.Parse(RFC3339Milli, v) + if err == nil { + cdt.Time = converted + } + return err +} + +func tryParsingRFC3339(v string, cdt *CustomDateTime) error { + converted, err := time.Parse(time.RFC3339, v) + if err == nil { + cdt.Time = converted + } + return err +} diff --git a/go-api/utils/test_helper.go b/go-api/utils/test_helper.go new file mode 100644 index 000000000..d1fd4a251 --- /dev/null +++ b/go-api/utils/test_helper.go @@ -0,0 +1,142 @@ +package utils + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + + "github.com/gofiber/fiber/v2" +) + +func req[T any](app *fiber.App, method string, endpoint string, requestBody interface{}) (T, error) { + var result T + var body io.Reader + + if requestBody != nil { + marshalledData, err := json.Marshal(requestBody) + if err != nil { + fmt.Println("failed to marshal request body") + return result, err + } + body = bytes.NewReader(marshalledData) + } + + req, err := http.NewRequest( + 
method, + endpoint, + body, + ) + if err != nil { + fmt.Println("failed to create request") + return result, err + } + + req.Header.Set("Content-Type", "application/json") + res, err := app.Test(req, -1) + if err != nil { + fmt.Println("failed to call test") + fmt.Println(err) + return result, err + } + + resultBody, err := io.ReadAll(res.Body) + if err != nil { + fmt.Println("failed to read result body:" + string(resultBody)) + return result, err + } + + err = json.Unmarshal(resultBody, &result) + if err != nil { + fmt.Println("failed Unmarshal result body:" + string(resultBody)) + return result, err + } + + return result, nil +} + +func GetRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "GET", endpoint, requestBody) +} + +func PostRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "POST", endpoint, requestBody) +} + +func PatchRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "PATCH", endpoint, requestBody) +} + +func DeleteRequest[T any](app *fiber.App, endpoint string, requestBody interface{}) (T, error) { + return req[T](app, "DELETE", endpoint, requestBody) +} + +func UrlRequest[T any](urlEndpoint string, method string, requestBody interface{}) (T, error) { + var result T + var body io.Reader + + if requestBody != nil { + marshalledData, err := json.Marshal(requestBody) + if err != nil { + fmt.Println("failed to marshal request body") + return result, err + } + body = bytes.NewReader(marshalledData) + } + + url, err := url.Parse(urlEndpoint) + if err != nil { + fmt.Println("Error parsing URL:", err) + return result, err + } + + req, err := http.NewRequest( + method, + url.String(), + body, + ) + if err != nil { + fmt.Println("failed to create request") + return result, err + } + + req.Header.Set("Content-Type", "application/json") + + response, err := http.DefaultClient.Do(req) + if err != 
nil { + fmt.Println(url.String()) + fmt.Println("Error making POST request:", err) + return result, err + } + resultBody, err := io.ReadAll(response.Body) + if err != nil { + fmt.Println("Error reading response body:", err) + return result, err + } + + err = json.Unmarshal(resultBody, &result) + if err != nil { + fmt.Println("failed Unmarshal result body:" + string(resultBody)) + return result, err + } + + return result, nil +} + +func DeepCopyMap(src map[string]interface{}) (map[string]interface{}, error) { + srcJSON, err := json.Marshal(src) + if err != nil { + return nil, err + } + + dst := make(map[string]interface{}) + + err = json.Unmarshal(srcJSON, &dst) + if err != nil { + return nil, err + } + + return dst, nil +} diff --git a/go-api/utils/utils.go b/go-api/utils/utils.go new file mode 100644 index 000000000..d0e060d23 --- /dev/null +++ b/go-api/utils/utils.go @@ -0,0 +1,296 @@ +package utils + +import ( + "context" + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "encoding/hex" + "errors" + "fmt" + "log" + "os" + "runtime/debug" + "strconv" + "strings" + + "golang.org/x/crypto/scrypt" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/recover" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/redis/go-redis/v9" +) + +type AppConfig struct { + Postgres *pgxpool.Pool + Redis *redis.Conn + App *fiber.App +} + +func IsTesting(c *fiber.Ctx) bool { + testing, ok := c.Locals("testing").(bool) + if !ok { + // disable test mode if loading testing fails + return false + } else { + return testing + } +} + +func GetPgx(c *fiber.Ctx) (*pgxpool.Pool, error) { + con, ok := c.Locals("pgxConn").(*pgxpool.Pool) + if !ok { + return con, errors.New("failed to get pgxConn") + } else { + return con, nil + } +} + +func GetRdb(c *fiber.Ctx) (redis.Conn, error) { + con, ok := c.Locals("rdb").(redis.Conn) + if !ok { + return con, errors.New("failed to get rdbConn") + } else { + 
return con, nil + } +} + +func SetRedis(c *fiber.Ctx, key string, value string) error { + redisConn, err := GetRdb(c) + if err != nil { + return err + } + + return redisConn.Set(c.Context(), key, value, 0).Err() +} + +func GetRedis(c *fiber.Ctx, key string) (string, error) { + redisConn, err := GetRdb(c) + if err != nil { + return "", err + } + + return redisConn.Get(c.Context(), key).Result() +} + +func RawQueryWithoutReturn(c *fiber.Ctx, query string, args map[string]any) error { + pgxPool, err := GetPgx(c) + if err != nil { + return err + } + + rows, err := pgxPool.Query(c.Context(), query, pgx.NamedArgs(args)) + if err != nil { + return err + } + defer rows.Close() + + return nil +} + +func QueryRow[T any](c *fiber.Ctx, query string, args map[string]any) (T, error) { + var result T + pgxPool, err := GetPgx(c) + if err != nil { + return result, err + } + + rows, err := pgxPool.Query(c.Context(), query, pgx.NamedArgs(args)) + if err != nil { + return result, err + } + + result, err = pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[T]) + return result, err +} + +func QueryRows[T any](c *fiber.Ctx, query string, args map[string]any) ([]T, error) { + results := []T{} + pgxPool, err := GetPgx(c) + if err != nil { + return results, err + } + + rows, err := pgxPool.Query(c.Context(), query, pgx.NamedArgs(args)) + if err != nil { + return results, err + } + + results, err = pgx.CollectRows(rows, pgx.RowToStructByName[T]) + return results, err +} + +func Setup(options ...string) (AppConfig, error) { + var version string + var appConfig AppConfig + + if len(options) > 0 { + version = options[0] + } else { + version = "test" + } + + config := LoadEnvVars() + // pgsql connect + pgxPool, pgxError := pgxpool.New(context.Background(), config["DATABASE_URL"].(string)) + if pgxError != nil { + return appConfig, pgxError + } + // redis connect + rdb := redis.NewClient(&redis.Options{ + Addr: config["REDIS_HOST"].(string) + ":" + config["REDIS_PORT"].(string), + }).Conn() + 
_, rdbErr := rdb.Ping(context.Background()).Result() + if rdbErr != nil { + return appConfig, rdbErr + } + + testing, err := strconv.ParseBool(config["TEST_MODE"].(string)) + if err != nil { + // defaults to testing false + testing = false + } + + app := fiber.New(fiber.Config{ + AppName: "go-api " + version, + EnablePrintRoutes: true, + ErrorHandler: CustomErrorHandler, + }) + app.Use(recover.New( + recover.Config{ + EnableStackTrace: true, + StackTraceHandler: CustomStackTraceHandler, + }, + )) + app.Use(cors.New()) + + app.Use(func(c *fiber.Ctx) error { + c.Locals("rdb", *rdb) + c.Locals("pgxConn", pgxPool) + c.Locals("testing", testing) + return c.Next() + }) + + appConfig = AppConfig{ + Postgres: pgxPool, + Redis: rdb, + App: app, + } + return appConfig, nil +} + +func EncryptText(textToEncrypt string) (string, error) { + config := LoadEnvVars() + password := config["ENCRYPT_PASSWORD"].(string) + // Generate a random 16-byte IV + iv := make([]byte, 16) + if _, err := rand.Read(iv); err != nil { + return "", err + } + + // Derive a 32-byte key using scrypt + key, err := scrypt.Key([]byte(password), []byte("salt"), 16384, 8, 1, 32) + if err != nil { + return "", err + } + + // Create a cipher using AES-256-CTR with the key and IV + block, err := aes.NewCipher(key) + if err != nil { + return "", err + } + stream := cipher.NewCTR(block, iv) + + // Encrypt the text + ciphertext := make([]byte, len(textToEncrypt)) + stream.XORKeyStream(ciphertext, []byte(textToEncrypt)) + + // Combine the IV and ciphertext into a single string + encryptedText := hex.EncodeToString(iv) + hex.EncodeToString(ciphertext) + + return encryptedText, nil +} + +func DecryptText(encryptedText string) (string, error) { + config := LoadEnvVars() + password := config["ENCRYPT_PASSWORD"].(string) + + // Extract the IV and ciphertext from the string + iv, err := hex.DecodeString(encryptedText[:32]) + if err != nil { + return "", err + } + ciphertext, err := hex.DecodeString(encryptedText[32:]) + 
if err != nil { + return "", err + } + + // Derive the key using scrypt + key, err := scrypt.Key([]byte(password), []byte("salt"), 16384, 8, 1, 32) + if err != nil { + return "", err + } + + // Create a decipher using AES-256-CTR with the key and IV + block, err := aes.NewCipher(key) + if err != nil { + return "", err + } + stream := cipher.NewCTR(block, iv) + + // Decrypt the ciphertext + decryptedText := make([]byte, len(ciphertext)) + stream.XORKeyStream(decryptedText, ciphertext) + + return string(decryptedText), nil +} + +func LoadEnvVars() map[string]interface{} { + return map[string]interface{}{ + "DATABASE_URL": os.Getenv("DATABASE_URL"), + "REDIS_HOST": os.Getenv("REDIS_HOST"), + "REDIS_PORT": os.Getenv("REDIS_PORT"), + "APP_PORT": os.Getenv("APP_PORT"), + "TEST_MODE": os.Getenv("TEST_MODE"), + "ENCRYPT_PASSWORD": os.Getenv("ENCRYPT_PASSWORD"), + } +} + +func CustomErrorHandler(c *fiber.Ctx, err error) error { + // Status code defaults to 500 + code := fiber.StatusInternalServerError + + // Retrieve the custom status code if it's a *fiber.Error + var e *fiber.Error + if errors.As(err, &e) { + code = e.Code + } + + // Set Content-Type: text/plain; charset=utf-8 + c.Set(fiber.HeaderContentType, fiber.MIMETextPlainCharsetUTF8) + + // Return status code with error message + // | ${status} | ${ip} | ${method} | ${path} | ${error}", + log.Printf("| %d | %s | %s | %s | %s\n", code, c.IP(), c.Method(), c.Path(), err.Error()) + return c.Status(code).SendString(err.Error()) +} + +func CustomStackTraceHandler(_ *fiber.Ctx, e interface{}) { + stackTrace := strings.Split(string(debug.Stack()), "\n") + var failPoint string + + for _, line := range stackTrace { + if strings.Contains(line, "controller.go") { + path := strings.Split(strings.TrimSpace(line), " ")[0] + splitted := strings.Split(path, "/") + failPoint = splitted[len(splitted)-2] + "/" + splitted[len(splitted)-1] + + break + } + } + log.Printf("| (%s) panic: %v \n", failPoint, e) + _, _ = 
os.Stderr.WriteString(fmt.Sprintf("%s\n", debug.Stack())) //nolint:errcheck // This will never fail +} diff --git a/go-api/vrf/controller.go b/go-api/vrf/controller.go new file mode 100644 index 000000000..e1c07e644 --- /dev/null +++ b/go-api/vrf/controller.go @@ -0,0 +1,142 @@ +package vrf + +import ( + "go-api/chain" + "go-api/utils" + + "github.com/go-playground/validator/v10" + "github.com/gofiber/fiber/v2" +) + +type VrfModel struct { + VrfKeyId *utils.CustomInt64 `db:"vrf_key_id" json:"id"` + Sk string `db:"sk" json:"sk" validate:"required"` + Pk string `db:"pk" json:"pk" validate:"required"` + PkX string `db:"pk_x" json:"pkX" validate:"required"` + PkY string `db:"pk_y" json:"pkY" validate:"required"` + KeyHash string `db:"key_hash" json:"keyHash" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +type VrfUpdateModel struct { + Sk string `db:"sk" json:"sk" validate:"required"` + Pk string `db:"pk" json:"pk" validate:"required"` + PkX string `db:"pk_x" json:"pkX" validate:"required"` + PkY string `db:"pk_y" json:"pkY" validate:"required"` + KeyHash string `db:"key_hash" json:"keyHash" validate:"required"` +} + +type VrfInsertModel struct { + Sk string `db:"sk" json:"sk" validate:"required"` + Pk string `db:"pk" json:"pk" validate:"required"` + PkX string `db:"pk_x" json:"pkX" validate:"required"` + PkY string `db:"pk_y" json:"pkY" validate:"required"` + KeyHash string `db:"key_hash" json:"keyHash" validate:"required"` + Chain string `db:"chain_name" json:"chain" validate:"required"` +} + +func insert(c *fiber.Ctx) error { + payload := new(VrfInsertModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.Chain}) + if err != nil { + panic(err) + } + + result, err := utils.QueryRow[VrfModel](c, 
InsertVrf, map[string]any{ + "sk": payload.Sk, + "pk": payload.Pk, + "pk_x": payload.PkX, + "pk_y": payload.PkY, + "key_hash": payload.KeyHash, + "chain_id": chain_result.ChainId}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func get(c *fiber.Ctx) error { + payload := new(struct { + CHAIN string `db:"name" json:"chain"` + }) + + if len(c.Body()) == 0 { + results, err := utils.QueryRows[VrfModel](c, GetVrfWithoutChainId, nil) + if err != nil { + panic(err) + } + return c.JSON(results) + } + + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + chain_result, err := utils.QueryRow[chain.ChainModel](c, chain.GetChainByName, map[string]any{"name": payload.CHAIN}) + if err != nil { + panic(err) + } + + results, err := utils.QueryRows[VrfModel](c, GetVrf, map[string]any{"chain_id": chain_result.ChainId}) + if err != nil { + panic(err) + } + + return c.JSON(results) +} + +func getById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[VrfModel](c, GetVrfById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func updateById(c *fiber.Ctx) error { + id := c.Params("id") + payload := new(VrfUpdateModel) + if err := c.BodyParser(payload); err != nil { + panic(err) + } + + validate := validator.New() + if err := validate.Struct(payload); err != nil { + panic(err) + } + + result, err := utils.QueryRow[VrfModel](c, UpdateVrfById, map[string]any{ + "id": id, + "sk": payload.Sk, + "pk": payload.Pk, + "pk_x": payload.PkX, + "pk_y": payload.PkY, + "key_hash": payload.KeyHash}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} + +func deleteById(c *fiber.Ctx) error { + id := c.Params("id") + result, err := utils.QueryRow[VrfModel](c, DeleteVrfById, map[string]any{"id": id}) + if err != nil { + panic(err) + } + + return c.JSON(result) +} diff --git a/go-api/vrf/queries.go b/go-api/vrf/queries.go new file mode 100644 index 000000000..bc31507d4 --- /dev/null +++ 
b/go-api/vrf/queries.go @@ -0,0 +1,43 @@ +package vrf + +const ( + InsertVrf = ` + INSERT INTO vrf_keys (sk, pk, pk_x, pk_y, key_hash, chain_id) + VALUES (@sk, @pk, @pk_x, @pk_y, @key_hash, @chain_id) + RETURNING vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, + (SELECT name FROM chains WHERE chains.chain_id = vrf_keys.chain_id) AS chain_name; + ` + + GetVrf = ` + SELECT vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, chains.name AS chain_name + FROM vrf_keys + JOIN chains ON vrf_keys.chain_id = chains.chain_id + WHERE vrf_keys.chain_id = @chain_id; + ` + + GetVrfWithoutChainId = ` + SELECT vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, chains.name AS chain_name + FROM vrf_keys + JOIN chains ON vrf_keys.chain_id = chains.chain_id; + ` + + GetVrfById = ` + SELECT vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, chains.name AS chain_name + FROM vrf_keys + JOIN chains ON vrf_keys.chain_id = chains.chain_id + WHERE vrf_key_id = @id LIMIT 1; + ` + + UpdateVrfById = ` + UPDATE vrf_keys + SET sk = @sk, pk = @pk, pk_x = @pk_x, pk_y = @pk_y, key_hash = @key_hash + WHERE vrf_key_id = @id + RETURNING vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, + (SELECT name FROM chains WHERE chains.chain_id = vrf_keys.chain_id) AS chain_name; + ` + + DeleteVrfById = ` + DELETE FROM vrf_keys WHERE vrf_key_id = @id RETURNING vrf_keys.vrf_key_id, vrf_keys.sk, vrf_keys.pk, vrf_keys.pk_x, vrf_keys.pk_y, vrf_keys.key_hash, + (SELECT name FROM chains WHERE chains.chain_id = vrf_keys.chain_id) AS chain_name; + ` +) diff --git a/go-api/vrf/route.go b/go-api/vrf/route.go new file mode 100644 index 000000000..e00a4b783 --- /dev/null +++ b/go-api/vrf/route.go @@ -0,0 +1,15 @@ +package vrf + +import ( + "github.com/gofiber/fiber/v2" +) + +func Routes(router 
fiber.Router) { + vrf := router.Group("/vrf") + + vrf.Post("", insert) + vrf.Get("", get) + vrf.Get("/:id", getById) + vrf.Patch("/:id", updateById) + vrf.Delete("/:id", deleteById) +}