Skip to content

Commit

Permalink
feat: api cache system with dragonfly (#436)
Browse files Browse the repository at this point in the history
**Describe the pull request**
This Pull Request introduces a new feature to our API - a cache system
implemented with Dragonfly. The cache system is primarily used to cache
all proxies from the intranet, aiming to prevent bursts on the intra
API. By caching frequently requested data, the system reduces the load
on the intra API, resulting in improved performance and response time.

Dragonfly, a robust Redis-compatible in-memory data store, is utilized to handle the caching
mechanism effectively, providing features such as cache expiration and
invalidation. The cache system is designed to gracefully handle cache
expiration and ensure that the most up-to-date data is served to the
users.

**Checklist**

- [ ] I have linked the related issue to this pull request
- [ ] I have made the modifications or added tests related to my PR
- [ ] I have added/updated the documentation for my PR
- [ ] I put my PR in Ready for Review only when all the checklist is
checked

**Breaking changes ?**
no
  • Loading branch information
42atomys authored May 23, 2023
1 parent 8d0f22e commit 4442a58
Show file tree
Hide file tree
Showing 29 changed files with 759 additions and 85 deletions.
10 changes: 5 additions & 5 deletions .devcontainer/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends gnupg2 postgresql-client \
iputils-ping dnsutils vim htop nano sudo curl build-essential zsh wget \
fonts-powerline tig ca-certificates software-properties-common && \
fonts-powerline tig ca-certificates software-properties-common redis-tools && \
# Register kubectl source list
curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg && \
echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list && \
curl -fsSLo /etc/apt/trusted.gpg.d/kubernetes-archive-keyring.gpg https://dl.k8s.io/apt/doc/apt-key.gpg && \
echo "deb [signed-by=/etc/apt/trusted.gpg.d/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list && \
# Register helm source list
curl https://baltocdn.com/helm/signing.asc | gpg --dearmor | tee /usr/share/keyrings/helm.gpg > /dev/null && \
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/helm.gpg] https://baltocdn.com/helm/stable/debian/ all main" | tee /etc/apt/sources.list.d/helm-stable-debian.list && \
curl https://baltocdn.com/helm/signing.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/helm.gpg > /dev/null && \
echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/trusted.gpg.d/helm.gpg] https://baltocdn.com/helm/stable/debian/ all main" | tee /etc/apt/sources.list.d/helm-stable-debian.list && \
# Run install of kubectl, helm and terraform
apt-get update && \
apt-get install kubectl helm && \
Expand Down
13 changes: 13 additions & 0 deletions .devcontainer/docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ services:
GO_ENV: development
APP_VERSION: indev
DATABASE_URL: postgresql://postgres:postgres@database:5432/s42?sslmode=disable
KEYVALUE_STORE_URL: redis://:@dragonfly:6379
AMQP_URL: amqp://rabbitmq:rabbitmq@rabbitmq:5672
CORS_ORIGIN: http://localhost:3000
SEARCHENGINE_MEILISEARCH_HOST: http://meilisearch:7700
Expand Down Expand Up @@ -60,6 +61,17 @@ services:
# Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
# (Adding the "ports" property to this file will not forward from a Codespace.)

dragonfly:
hostname: s42-dragonfly
image: 'docker.dragonflydb.io/dragonflydb/dragonfly'
restart: unless-stopped
ulimits:
memlock: -1
volumes:
- dragonfly-data:/data
# Use "forwardPorts" in **devcontainer.json** to forward a workspace port locally.
# (Adding the "ports" property to this file will not forward from a Codespace.)

rabbitmq:
hostname: s42-rabbitmq
image: ghcr.io/42atomys/s42-rabbitmq:3.10.2-management
Expand Down Expand Up @@ -91,6 +103,7 @@ services:

volumes:
postgres-data:
dragonfly-data:
meilisearch-data:
minio-data:

Expand Down
2 changes: 1 addition & 1 deletion .devcontainer/postStartCommand.sh
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ make -f build/Makefile devcontainer-init
# Create the s42-users bucket
go install github.com/minio/mc@latest
mc alias set s3 http://minio:9000 $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY
mc mb s3/$S3_BUCKET_USERS --ignore-existing --region europe-west1
mc mb s3/s42-users --ignore-existing --region europe-west1

# Install and configure kubeseal
go install github.com/bitnami-labs/sealed-secrets/cmd/[email protected]
2 changes: 1 addition & 1 deletion .github/workflows/linters.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ jobs:
- name: Setup go
uses: actions/setup-go@v4
with:
go-version: "1.18"
go-version: "1.20"
check-latest: true
- name: Setup protoc
uses: arduino/setup-protoc@v1
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ jobs:
- name: Setup go
uses: actions/setup-go@v4
with:
go-version: "1.18"
go-version: "1.20"
check-latest: true
- name: Setup protoc
uses: arduino/setup-protoc@v1
Expand Down
23 changes: 19 additions & 4 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@
"mode": "debug",
"program": "${workspaceFolder}/main.go",
"args": ["serve", "api", "-g"],
"env": {
"DEBUG": "true"
},
"showLog": true
},
{
Expand All @@ -16,7 +19,10 @@
"request": "launch",
"mode": "debug",
"program": "${workspaceFolder}/main.go",
"args": ["serve", "jwtks"]
"args": ["serve", "jwtks"],
"env": {
"DEBUG": "true"
}
},
{
"name": "Launch Interface (with chrome debug)",
Expand All @@ -36,23 +42,32 @@
"request": "launch",
"mode": "debug",
"program": "${workspaceFolder}/main.go",
"args": ["jobs", "webhooks"]
"args": ["jobs", "webhooks"],
"env": {
"DEBUG": "true"
}
},
{
"name": "Launch crawler (campus)",
"type": "go",
"request": "launch",
"mode": "debug",
"program": "${workspaceFolder}/main.go",
"args": ["jobs", "crawler", "campus"]
"args": ["jobs", "crawler", "campus"],
"env": {
"DEBUG": "true"
}
},
{
"name": "Launch crawler (locations)",
"type": "go",
"request": "launch",
"mode": "debug",
"program": "${workspaceFolder}/main.go",
"args": ["jobs", "crawler", "locations"]
"args": ["jobs", "crawler", "locations"],
"env": {
"DEBUG": "true"
}
}
]
}
17 changes: 17 additions & 0 deletions .vscode/tasks.json
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,23 @@
"panel": "dedicated"
}
},
{
"label": "Run dragonfly cli (redis-cli)",
"detail": "Execute redis-cli on container",
"type": "process",
"isBackground": true,
"command": "redis-cli",
"icon": {
"id": "debug-start",
"color": "terminal.ansiRed"
},
"problemMatcher": [],
"args": ["-h", "dragonfly", "-p", "6379"],
"presentation": {
"focus": true,
"panel": "dedicated"
}
},
{
"label": "Populate DB with campus",
"detail": "Execute the campus crawler to populate your development database with campus (need 42 credentials)",
Expand Down
2 changes: 1 addition & 1 deletion build/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# GOLANG BUILD - BUILD
FROM golang:1.18 AS go-build
FROM golang:1.20 AS go-build

WORKDIR /build
COPY . /build
Expand Down
35 changes: 18 additions & 17 deletions cmd/api.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@ package cmd

import (
"bytes"
"context"
"fmt"
"io/ioutil"
"io"
"net/http"
"os"
"strings"
Expand All @@ -20,11 +21,13 @@ import (
"github.com/rs/cors"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/vektah/gqlparser/v2/gqlerror"
"go.opentelemetry.io/otel"

"atomys.codes/stud42/internal/api"
modelsutils "atomys.codes/stud42/internal/models"
"atomys.codes/stud42/internal/pkg/searchengine"
"atomys.codes/stud42/pkg/cache"
"atomys.codes/stud42/pkg/otelgql"
)

Expand All @@ -39,27 +42,25 @@ var apiCmd = &cobra.Command{
Short: "Serve the API in production",

PreRun: func(cmd *cobra.Command, args []string) {
if err := modelsutils.Connect(); err != nil {
log.Fatal().Err(err).Msg("failed to connect to database")
}

if err := modelsutils.Migrate(); err != nil {
log.Fatal().Err(err).Msg("failed to migrate database")
}

searchengine.Initizialize()
},

Run: func(cmd *cobra.Command, args []string) {
tracer := otel.GetTracerProvider().Tracer("graphql-api")
srv := handler.NewDefaultServer(api.NewSchema(modelsutils.Client(), tracer))
// srv.SetRecoverFunc(func(ctx context.Context, err interface{}) error {
// // notify bug tracker...
// log.Error().Err(err.(error)).Msg("unhandled error")
// return gqlerror.Errorf("Internal server error!")
// })
cacheClient, _ := cmd.Context().Value(keyValueCtxKey{}).(*cache.Client)
gqlCacheClient, err := cacheClient.NewGQLCache(30 * time.Minute)
if err != nil {
log.Fatal().Err(err).Msg("failed to init gql cache")
}

srv := handler.NewDefaultServer(api.NewSchema(modelsutils.Client(), cacheClient, tracer))
srv.SetRecoverFunc(func(ctx context.Context, err interface{}) error {
// notify bug tracker...
log.Error().Err(err.(error)).Msg("unhandled api error")
return gqlerror.Errorf("Internal server error!")
})
srv.Use(entgql.Transactioner{TxOpener: modelsutils.Client()})
srv.Use(extension.AutomaticPersistedQuery{Cache: gqlCacheClient})
srv.Use(extension.FixedComplexityLimit(64))
srv.Use(otelgql.Middleware(tracer))

Expand Down Expand Up @@ -96,7 +97,7 @@ var apiCmd = &cobra.Command{
const _50KB = (1 << 10) * 50

limitedBody := http.MaxBytesReader(w, r.Body, _50KB)
bodyBytes, err := ioutil.ReadAll(limitedBody)
bodyBytes, err := io.ReadAll(limitedBody)
limitedBody.Close()

// if r.Body reach the max size limit, the request will be canceled
Expand All @@ -106,7 +107,7 @@ var apiCmd = &cobra.Command{
return
}

r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
h.ServeHTTP(w, r)
}
return http.HandlerFunc(fn)
Expand Down
9 changes: 0 additions & 9 deletions cmd/campus.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,6 @@ import (
var campusCmd = &cobra.Command{
Use: "campus",
Short: "Crawl all campus of 42 network and update the database",
PreRun: func(cmd *cobra.Command, args []string) {
if err := modelsutils.Connect(); err != nil {
log.Fatal().Err(err).Msg("failed to connect to database")
}

if err := modelsutils.Migrate(); err != nil {
log.Fatal().Err(err).Msg("failed to migrate database")
}
},
Run: func(cmd *cobra.Command, args []string) {
log.Info().Msg("Start the crawling of all campus of 42 network")
campuses, err := duoapi.CampusAll(cmd.Context())
Expand Down
11 changes: 0 additions & 11 deletions cmd/locations.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ import (
"atomys.codes/stud42/internal/models/generated/campus"
"atomys.codes/stud42/internal/models/generated/location"
"atomys.codes/stud42/internal/models/generated/user"
"atomys.codes/stud42/internal/pkg/searchengine"
"atomys.codes/stud42/pkg/duoapi"
)

Expand All @@ -25,16 +24,6 @@ var locationsCmd = &cobra.Command{
Short: "Crawl all active locations of specific campus and update the database",
Long: `Crawl all active locations of specific campus and update the database.
For any closed locations, the location will be marked as inactive in the database.`,
PreRun: func(cmd *cobra.Command, args []string) {
if err := modelsutils.Connect(); err != nil {
log.Fatal().Err(err).Msg("failed to connect to database")
}

if err := modelsutils.Migrate(); err != nil {
log.Fatal().Err(err).Msg("failed to migrate database")
}
searchengine.Initizialize()
},
Run: func(cmd *cobra.Command, args []string) {
var campusID = cmd.Flag("campus_id").Value.String()
db := modelsutils.Client()
Expand Down
10 changes: 0 additions & 10 deletions cmd/reindexusers.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,6 @@ This operation is useful when the meilisearch index is corrupted or when the
meilisearch index is not up to date. This operation will take a long time to
complete. This operation will drop the meilisearch index and recreate it with
all the users.`,
PreRun: func(cmd *cobra.Command, args []string) {
if err := modelsutils.Connect(); err != nil {
log.Fatal().Err(err).Msg("failed to connect to database")
}

if err := modelsutils.Migrate(); err != nil {
log.Fatal().Err(err).Msg("failed to migrate database")
}
searchengine.Initizialize()
},
Run: func(cmd *cobra.Command, args []string) {
log.Info().Msg("Prepare the re-indexation of the users")

Expand Down
26 changes: 26 additions & 0 deletions cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,43 @@ import (
"context"
"strings"

modelsutils "atomys.codes/stud42/internal/models"
"atomys.codes/stud42/pkg/cache"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)

var cfgFile string

type keyValueCtxKey struct{}

// rootCmd represents the base command when called without any subcommands
var rootCmd = &cobra.Command{
Use: "api",
Short: "stud42 API",
PersistentPreRun: func(cmd *cobra.Command, args []string) {
var cacheClient *cache.Client
var err error

keyValueStoreUrl := viper.GetString("keyvalue-store-url")
if keyValueStoreUrl != "" {
cacheClient, err = cache.NewClient(viper.GetString("keyvalue-store-url"))
if err != nil {
log.Fatal().Err(err).Msg("failed to create cache")
}

cmd.SetContext(context.WithValue(cmd.Context(), keyValueCtxKey{}, cacheClient))
}

if modelsutils.Connect(cacheClient) != nil {
log.Fatal().Msg("Failed to connect to database")
}

if err := modelsutils.Migrate(); err != nil {
log.Fatal().Err(err).Msg("failed to migrate database")
}
},
}

// Execute adds all child commands to the root command and sets flags appropriately.
Expand Down
8 changes: 8 additions & 0 deletions deploy/stacks/apps/s42/api.tf
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,9 @@ module "api" {
DATABASE_HOST = "postgres.${var.namespace}.svc.cluster.local"
DATABASE_NAME = "s42"
DATABASE_URL = "postgresql://postgres:$(DATABASE_PASSWORD)@$(DATABASE_HOST):5432/$(DATABASE_NAME)?sslmode=disable"
KEYVALUE_STORE_HOST = "dragonfly.${var.namespace}.svc.cluster.local"
KEYVALUE_STORE_PORT = "6379"
KEYVALUE_STORE_URL = "redis://:$(DFLY_PASSWORD)@$(KEYVALUE_STORE_HOST):$(KEYVALUE_STORE_PORT)"
SEARCHENGINE_MEILISEARCH_HOST = "http://meilisearch.${var.namespace}.svc.cluster.local:7700"
}

Expand All @@ -60,6 +63,11 @@ module "api" {
name = "postgres-credentials"
}

DFLY_PASSWORD = {
key = "DFLY_PASSWORD"
name = "dragonfly-credentials"
}

GITHUB_TOKEN = {
key = "GITHUB_TOKEN"
name = "github-token"
Expand Down
Loading

0 comments on commit 4442a58

Please sign in to comment.