From 26860fabcc47c51b35d86bd743dbf7848711905e Mon Sep 17 00:00:00 2001
From: Mario-SO
Date: Thu, 19 Dec 2024 09:41:31 +0100
Subject: [PATCH] read from secrets staging

---
 .github/workflows/build_server.yml | 183 ++++++++---------------------
 docker-compose.staging.yml         |  21 ++--
 2 files changed, 63 insertions(+), 141 deletions(-)

diff --git a/.github/workflows/build_server.yml b/.github/workflows/build_server.yml
index d59024405..96fb89de7 100644
--- a/.github/workflows/build_server.yml
+++ b/.github/workflows/build_server.yml
@@ -47,6 +47,7 @@ jobs:
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
 
+      # Add caching for Docker layers
       - name: Cache Docker layers
         uses: actions/cache@v3
         with:
@@ -75,11 +76,11 @@ jobs:
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
 
+      # Temp fix for cache handling
       - name: Move cache
         run: |
           rm -rf /tmp/.buildx-cache
           mv /tmp/.buildx-cache-new /tmp/.buildx-cache
-
   deploy:
     needs: [prepare-env, build-and-push-images]
     runs-on: ubuntu-latest
@@ -87,13 +88,14 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+      # Setup SSH key
       - name: Setup SSH key
         run: |
           mkdir -p ~/.ssh
           echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
           chmod 600 ~/.ssh/id_rsa
           ssh-keyscan -H 145.223.118.217 >> ~/.ssh/known_hosts
-
+      # Setup Docker context from base64-encoded secret
       - name: Setup Docker context
         run: |
           mkdir -p ~/.docker/contexts
@@ -106,145 +108,60 @@ jobs:
             docker context import streameth-staging ~/.docker/contexts/streameth-staging.tar.gz
             docker context use streameth-staging
           fi
-
-      - name: Install Python dependencies
-        run: pip install PyYAML
-
-      - name: Process and deploy compose file
+      # Copy compose files to server
+      - name: Copy compose files
+        run: |
+          scp docker-compose.*.yml streameth@145.223.118.217:/home/streameth/streameth/
+      # Log in to registry on deployment server
+      - name: Log in to registry on deployment server
+        run: |
+          echo "${{ secrets.TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+      # Update services based on environment
+      - name: Update services
         run: |
           STACK_NAME="${{ needs.prepare-env.outputs.stack_name }}"
-          COMPOSE_FILE="${{ needs.prepare-env.outputs.compose_file }}"
-          ENV_TAG="${{ needs.prepare-env.outputs.env_tag }}"
           SHA="${{ github.sha }}"
-          echo "🔍 Debug: Using stack name: $STACK_NAME"
-          echo "🔍 Debug: Using compose file: $COMPOSE_FILE"
-          echo "🔍 Debug: Environment tag: $ENV_TAG"
-          echo "🔍 Debug: Commit SHA: $SHA"
-
-          echo "📝 Creating .env file..."
-          cat << EOF > .env
-          AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          EOF
-
-
-          # Create Python script to process the YAML
-          echo "📝 Creating Python script..."
-          cat << EOF > process_compose.py
-          import sys
-          import yaml
-          import os
-
-          data = yaml.safe_load(sys.stdin)
-          # Remove name property if it exists
-          if 'name' in data:
-              del data['name']
-
-          # Convert depends_on to lists while preserving network configurations
-          for service in data["services"].values():
-              if "depends_on" in service:
-                  service["depends_on"] = list(service["depends_on"].keys())
-              # Ensure network configurations are preserved
-              if "networks" not in service:
-                  service["networks"] = ["default"]
-
-          # Remove default network name to prevent conflicts
-          if "networks" in data and "default" in data["networks"]:
-              if "name" in data["networks"]["default"]:
-                  del data["networks"]["default"]["name"]
-
-          yaml.dump(data, sys.stdout, default_flow_style=False)
-          EOF
-
-          # Process the compose file
-          echo "🔄 Processing compose file..."
-          if ! docker compose --env-file .env -f $COMPOSE_FILE config | python3 process_compose.py > processed-compose.yml; then
-            echo "❌ Failed to process compose file"
-            exit 1
-          fi
-
-          echo "📊 Processed file details:"
-          ls -l processed-compose.yml
-          echo "📄 First few lines of processed file:"
-          head -n 10 processed-compose.yml
-
-          # Verify images on VPS and deploy
-          echo "🚀 Deploying to VPS..."
-          cat processed-compose.yml | ssh streameth@145.223.118.217 "
-            set -e
-
-            # Setup directory if it doesn't exist
-            if [ ! -d /home/streameth/streameth ]; then
-              echo '📁 Creating directory structure...'
-              mkdir -p /home/streameth/streameth
-            fi
-
-            echo '📝 Writing compose file...'
-            cat > /home/streameth/streameth/processed-compose.yml
-
-            echo '🔑 Logging into GitHub Container Registry...'
-            echo '${{ secrets.TOKEN }}' | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-
-            # Verify images
-            echo '🔍 Verifying images...'
-            for service in server stage-transcriptions session-transcriptions clips reel-creator; do
-              echo \"Pulling \${service}...\"
-              docker pull ghcr.io/streamethorg/streameth-platform/\${service}:${ENV_TAG}-${SHA}
-            done
-
-            # Check if stack exists
-            if docker stack ls | grep -q \"${STACK_NAME}\"; then
-              echo '🔄 Updating existing stack...'
+          # Function to update a service and capture its exit status
+          update_service() {
+            local service=$1
+            local temp_file=$(mktemp)
+            if docker service update \
+              --with-registry-auth \
+              --image ghcr.io/streamethorg/streameth-platform/$service:${{ needs.prepare-env.outputs.env_tag }}-$SHA \
+              ${STACK_NAME}_${service} > $temp_file 2>&1; then
+              echo "✅ Service ${STACK_NAME}_${service} updated successfully"
+              rm $temp_file
+              return 0
             else
-              echo '🆕 Creating new stack...'
+              echo "❌ Failed to update ${STACK_NAME}_${service}"
+              cat $temp_file
+              rm $temp_file
+              return 1
             fi
-
-            echo '🚀 Deploying stack...'
-            docker stack deploy -c /home/streameth/streameth/processed-compose.yml --with-registry-auth $STACK_NAME
-          "
+          }
 
-          echo "⏳ Waiting for stack to stabilize..."
-          sleep 15
+          # Start all updates in parallel and capture PIDs
+          pids=()
+          for service in server stage-transcriptions session-transcriptions clips reel-creator; do
+            update_service $service &
+            pids+=($!)
+          done
 
-          # Update services
-          echo "🔄 Updating services..."
-          ssh streameth@145.223.118.217 "
-            for service in server stage-transcriptions session-transcriptions clips reel-creator; do
-              if docker service ls | grep -q \"${STACK_NAME}_\${service}\"; then
-                echo \"Updating \${service}...\"
-                docker service update \
-                  --with-registry-auth \
-                  --image ghcr.io/streamethorg/streameth-platform/\${service}:${ENV_TAG}-${SHA} \
-                  ${STACK_NAME}_\${service}
-                sleep 15
-              else
-                echo \"⚠️ Service \${service} not found in stack\"
-              fi
-            done
-          "
+          # Wait for all updates and check for failures
+          failed=0
+          for pid in ${pids[@]}; do
+            if ! wait $pid; then
+              failed=1
+            fi
+          done
 
-          echo "📋 Deployment process completed"
-
+          # Exit with failure if any update failed
+          exit $failed
 
+      # Deploy router if it doesn't exist (only needs to be done once)
       - name: Deploy router if needed
-        if: github.ref == 'refs/heads/docker-prod'
-        run: |
-          ssh streameth@145.223.118.217 "
-            if ! docker stack ls | grep -q \"router\"; then
-              echo \"🔄 Setting up router stack...\"
-              if [ -f /home/streameth/streameth/docker-compose.router.yml ]; then
-                docker stack deploy -c /home/streameth/streameth/docker-compose.router.yml router
-                echo \"✅ Router stack deployed\"
-              else
-                echo \"⚠️ Warning: Router compose file not found at /home/streameth/streameth/docker-compose.router.yml\"
-                exit 1
-              fi
-            else
-              echo \"ℹ️ Router stack already exists\"
-            fi
-          "
-
-      - name: Cleanup processed compose file
-        if: always()
+        if: github.ref == 'refs/heads/docker-prod' # Only check/deploy router on prod branch pushes
         run: |
-          ssh streameth@145.223.118.217 "rm -f /home/streameth/streameth/processed-compose.yml"
\ No newline at end of file
+          if ! docker stack ls | grep -q "router"; then
+            docker stack deploy -c /home/streameth/streameth/docker-compose.router.yml router
+          fi
\ No newline at end of file
diff --git a/docker-compose.staging.yml b/docker-compose.staging.yml
index 2bbd666b5..40f07f13e 100644
--- a/docker-compose.staging.yml
+++ b/docker-compose.staging.yml
@@ -53,8 +53,8 @@ x-common-environment: &common-environment
   REDIS_PORT: 6379
   OPENAI_API_KEY_FILE: /run/secrets/openai-api-key
   # AWS Configuration read from GitHub Actions secrets
-  AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-  AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
+  AWS_ACCESS_KEY_ID: /run/secrets/aws-access-key
+  AWS_SECRET_ACCESS_KEY: /run/secrets/aws-secret-key
 
 x-common-deploy: &common-deploy
   restart_policy:
@@ -87,6 +87,8 @@ x-common-secrets: &common-secrets
   - google-service-account-email
   - redis-password
   - openai-api-key
+  - aws-access-key
+  - aws-secret-key
 
 services:
   server:
@@ -160,13 +162,13 @@ services:
   reel-creator:
     image: ghcr.io/streamethorg/streameth-platform/reel-creator:staging
     environment:
-      NODE_ENV: staging
+      NODE_ENV: development
       SERVER_WEBHOOK_URL: https://staging.api.streameth.org/webhook/remotion
       SERVER_WEBHOOK_SECRET_FILE: /run/secrets/remotion-webhook-secret
-      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
+      AWS_ACCESS_KEY_ID: /run/secrets/aws-access-key
+      AWS_SECRET_ACCESS_KEY: /run/secrets/aws-secret-key
       BUCKET_NAME: streameth-develop
-      BUCKET_URL: https://streameth-staging.ams3.digitaloceanspaces.com
+      BUCKET_URL: https://streameth-develop.ams3.cdn.digitaloceanspaces.com
       SPACES_KEY_FILE: /run/secrets/spaces-key
       SPACES_SECRET_FILE: /run/secrets/spaces-secret
       SITE_NAME: rendering-engine
@@ -239,7 +241,6 @@ services:
       - redis
     environment:
       <<: *common-environment
-      REMOTION_WEBHOOK_URL: https://staging.api.streameth.org/remotion-webhook
     deploy:
       <<: *common-deploy
       mode: replicated
@@ -309,4 +310,8 @@ secrets:
   redis-password:
     external: true
   openai-api-key:
-    external: true
\ No newline at end of file
+    external: true
+  aws-access-key:
+    external: true
+  aws-secret-key:
+    external: true
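Note: docker-compose.staging.yml declares aws-access-key and aws-secret-key as external secrets, so they must already exist on the Swarm before the stack is deployed. A minimal sketch of creating them once on the manager node (the AWS_KEY and AWS_SECRET shell variables are placeholders, not part of this patch):

    # Create the Swarm secrets referenced by docker-compose.staging.yml;
    # the names must match the compose file exactly.
    printf '%s' "$AWS_KEY"    | docker secret create aws-access-key -
    printf '%s' "$AWS_SECRET" | docker secret create aws-secret-key -

    # Confirm the secrets are registered before running docker stack deploy
    docker secret ls --filter name=aws-

Services then see the values as files under /run/secrets/aws-access-key and /run/secrets/aws-secret-key, which is where the updated AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY entries point.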