This repository has been archived by the owner on Nov 20, 2024. It is now read-only.

Commit

COSI-15: Add bucket creation E2E tests and cleanup
anurag4DSB committed Nov 8, 2024
1 parent bdcec3e commit a10bfd4
Showing 6 changed files with 174 additions and 6 deletions.
54 changes: 54 additions & 0 deletions .github/scripts/cleanup_cosi_resources.sh
@@ -0,0 +1,54 @@
#!/bin/bash
set -e

LOG_FILE=".github/e2e_tests/artifacts/logs/kind_cluster_logs/cosi_deployment/cleanup_debug.log"
mkdir -p "$(dirname "$LOG_FILE")" # Ensure the log directory exists

error_handler() {
  echo "An error occurred during the COSI cleanup. Check the log file for details." | tee -a "$LOG_FILE"
  echo "Failed command: $BASH_COMMAND" | tee -a "$LOG_FILE"
  exit 1
}

trap 'error_handler' ERR

log_and_run() {
  echo "Running: $*" | tee -a "$LOG_FILE"
  "$@" | tee -a "$LOG_FILE"
}

log_and_run echo "Removing COSI driver manifests and namespace..."
log_and_run kubectl delete -k . || echo "COSI driver manifests not found." | tee -a "$LOG_FILE"
log_and_run kubectl delete namespace scality-object-storage || echo "Namespace scality-object-storage not found." | tee -a "$LOG_FILE"

log_and_run echo "Verifying namespace deletion..."
if kubectl get namespace scality-object-storage &>/dev/null; then
  echo "Warning: Namespace scality-object-storage was not deleted." | tee -a "$LOG_FILE"
  exit 1
fi

log_and_run echo "Removing Finalizers from Bucket Claim and Bucket"
log_and_run kubectl patch bucketclaim my-bucket-claim -p '{"metadata":{"finalizers":[]}}' --type=merge || echo "Bucket Claim finalizers not found." | tee -a "$LOG_FILE"

BUCKET_NAMES=$(kubectl get bucket -o jsonpath='{.items[*].metadata.name}')

for BUCKET_NAME in $BUCKET_NAMES; do
  log_and_run echo "Removing finalizers from bucket: $BUCKET_NAME"
  log_and_run kubectl patch bucket "$BUCKET_NAME" -p '{"metadata":{"finalizers":[]}}' --type=merge || echo "Finalizers not found for bucket: $BUCKET_NAME" | tee -a "$LOG_FILE"
done

log_and_run echo "Deleting Bucket Claim and Bucket Class..."
log_and_run kubectl delete -f cosi-examples/bucketclass.yaml || echo "Bucket Class not found." | tee -a "$LOG_FILE"
log_and_run kubectl delete -f cosi-examples/bucketclaim.yaml || echo "Bucket Claim not found." | tee -a "$LOG_FILE"

log_and_run echo "Deleting COSI CRDs..."
log_and_run kubectl delete -k github.com/kubernetes-sigs/container-object-storage-interface-api || echo "COSI API CRDs not found." | tee -a "$LOG_FILE"
log_and_run kubectl delete -k github.com/kubernetes-sigs/container-object-storage-interface-controller || echo "COSI Controller CRDs not found." | tee -a "$LOG_FILE"

log_and_run echo "Verifying COSI CRDs deletion..."
if kubectl get crd | grep 'container-object-storage-interface' &>/dev/null; then
  echo "Warning: Some COSI CRDs were not deleted." | tee -a "$LOG_FILE"
  exit 1
fi

log_and_run echo "COSI cleanup completed successfully."
102 changes: 102 additions & 0 deletions .github/scripts/e2e_test_bucket_creation.sh
@@ -0,0 +1,102 @@
#!/bin/bash
set -e

# Define log file for debugging
LOG_FILE=".github/e2e_tests/artifacts/logs/e2e_tests/bucket_creation_test.log"
mkdir -p "$(dirname "$LOG_FILE")" # Ensure the log directory exists

# Error handling function
error_handler() {
  echo "An error occurred during the COSI setup. Check the log file for details." | tee -a "$LOG_FILE"
  echo "Failed command: $BASH_COMMAND" | tee -a "$LOG_FILE"
  exit 1
}

# Trap errors and call the error handler
trap 'error_handler' ERR

# Log command execution to the log file for debugging
log_and_run() {
  echo "Running: $*" | tee -a "$LOG_FILE"
  "$@" | tee -a "$LOG_FILE"
}

# Step 1: Create Account in Vault
log_and_run echo "Creating account in Vault container..."
CONTAINER_ID=$(docker ps -qf "name=s3_and_iam_deployment-iam-1")
log_and_run docker exec "$CONTAINER_ID" sh -c "ADMIN_ACCESS_KEY_ID=D4IT2AWSB588GO5J9T00 ADMIN_SECRET_ACCESS_KEY=UEEu8tYlsOGGrgf4DAiSZD6apVNPUWqRiPG0nTB6 ./node_modules/vaultclient/bin/vaultclient create-account --name cosi-account --email [email protected]"
log_and_run docker exec "$CONTAINER_ID" sh -c "ADMIN_ACCESS_KEY_ID=D4IT2AWSB588GO5J9T00 ADMIN_SECRET_ACCESS_KEY=UEEu8tYlsOGGrgf4DAiSZD6apVNPUWqRiPG0nTB6 ./node_modules/vaultclient/bin/vaultclient generate-account-access-key --name=cosi-account --accesskey=PBUOB68AVF39EVVAFNFL --secretkey=P+PK+uMB9spUc21huaQoOexqdJoV00tSnl+pc7t7"

# Retrieve the Host IP Address
HOST_IP=$(hostname -I | awk '{print $1}')
log_and_run echo "Using Host IP: $HOST_IP"

# Step 2: Configure AWS CLI in Home Directory
log_and_run echo "Configuring AWS CLI in home directory..."
log_and_run mkdir -p ~/.aws # Ensure the ~/.aws directory exists

# Create the AWS credentials file
cat <<EOF | tee -a "$LOG_FILE" > ~/.aws/credentials
[default]
aws_access_key_id = PBUOB68AVF39EVVAFNFL
aws_secret_access_key = P+PK+uMB9spUc21huaQoOexqdJoV00tSnl+pc7t7
EOF

# Create the AWS config file
cat <<EOF | tee -a "$LOG_FILE" > ~/.aws/config
[default]
region = us-east-1
output = json
EOF

# Step 3: Apply S3 Secret for COSI with Host IP
log_and_run echo "Applying S3 Secret for COSI with updated endpoint..."
cat <<EOF | kubectl apply -f - | tee -a "$LOG_FILE"
apiVersion: v1
kind: Secret
metadata:
  name: s3-secret-for-cosi
  namespace: default
type: Opaque
stringData:
  COSI_S3_ACCESS_KEY_ID: PBUOB68AVF39EVVAFNFL
  COSI_S3_SECRET_ACCESS_KEY: P+PK+uMB9spUc21huaQoOexqdJoV00tSnl+pc7t7
  COSI_S3_ENDPOINT: http://$HOST_IP:8000
  COSI_S3_REGION: us-west-1
EOF

# Step 4: Apply Bucket Class
log_and_run echo "Applying Bucket Class..."
log_and_run kubectl apply -f cosi-examples/bucketclass.yaml

# Step 5: Apply Bucket Claim
log_and_run echo "Applying Bucket Claim..."
log_and_run kubectl apply -f cosi-examples/bucketclaim.yaml

# Step 6: Verify Bucket Creation with Retry
log_and_run echo "Listing all S3 buckets before verification..."
log_and_run aws s3 ls --endpoint-url "http://localhost:8000"
sleep 5

log_and_run echo "Verifying bucket creation..."
BUCKET_NAME_PREFIX="my-bucket-class"

ATTEMPTS=12 # Total attempts (2 minutes / 10 seconds per attempt)
DELAY=10 # Delay between attempts in seconds

for ((i=1; i<=$ATTEMPTS; i++)); do
  log_and_run aws --endpoint-url "http://localhost:8000" s3 ls
  BUCKET_FOUND=$(aws --endpoint-url "http://localhost:8000" s3api list-buckets --query "Buckets[?starts_with(Name, 'my-bucket-class')].Name" --output text)

  if [ -n "$BUCKET_FOUND" ]; then
    log_and_run echo "Bucket created with prefix '$BUCKET_NAME_PREFIX': $BUCKET_FOUND"
    exit 0
  else
    log_and_run echo "Attempt $i: Bucket with prefix '$BUCKET_NAME_PREFIX' not found. Retrying in $DELAY seconds..."
    sleep $DELAY
  fi
done

# If the bucket was not found within the timeout
log_and_run echo "Bucket with prefix '$BUCKET_NAME_PREFIX' was not created within the expected time."
exit 1
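
If the retry loop times out, the COSI objects themselves are the quickest things to inspect. A debugging sketch, assuming the default namespace used above and the driver namespace referenced in the cleanup script; <driver-pod-name> is a placeholder to fill in from the pod listing:

  # Kubernetes-side view of what the driver provisioned
  kubectl get bucketclaim my-bucket-claim -o yaml
  kubectl get buckets
  # Driver pods and logs
  kubectl -n scality-object-storage get pods
  kubectl -n scality-object-storage logs <driver-pod-name>
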
12 changes: 12 additions & 0 deletions .github/workflows/ci-e2e-tests.yml
@@ -86,6 +86,12 @@ jobs:
docker save "$CLOUDSERVER_IMAGE" -o /tmp/.docker_cache/cloudserver_image.tar
shell: bash

- name: E2E tests for bucket creation via COSI driver
run: |
pwd
chmod +x .github/scripts/e2e_test_bucket_creation.sh
.github/scripts/e2e_test_bucket_creation.sh
- name: Cleaup IAM and S3 Services
run: docker compose --profile iam_s3 down
working-directory: .github/s3_and_iam_deployment
@@ -104,6 +110,12 @@ jobs:
          .github/scripts/capture_k8s_logs.sh
        if: always()

+     - name: Cleanup COSI CRDs, Controller, and Driver
+       run: |
+         chmod +x .github/scripts/cleanup_cosi_resources.sh
+         .github/scripts/cleanup_cosi_resources.sh
+       if: always()

      - name: Upload logs and data to Scality artifacts
        uses: scality/action-artifacts@v4
        with:
4 changes: 2 additions & 2 deletions cosi-examples/bucketclaim.yaml
@@ -1,8 +1,8 @@
 kind: BucketClaim
 apiVersion: objectstorage.k8s.io/v1alpha1
 metadata:
-  name: bucket-claim-1
+  name: my-bucket-claim
 spec:
-  bucketClassName: bucket-1-name-prefix
+  bucketClassName: my-bucket-class
   protocols:
     - s3
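
Once the claim is applied, its binding and the generated bucket name can be read from its status. A quick sketch; the status field names follow the upstream COSI v1alpha1 API and are an assumption here:

  kubectl apply -f cosi-examples/bucketclaim.yaml
  kubectl get bucketclaim my-bucket-claim -o jsonpath='{.status.bucketReady} {.status.bucketName}{"\n"}'
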
4 changes: 2 additions & 2 deletions cosi-examples/bucketclass.yaml
@@ -1,9 +1,9 @@
 kind: BucketClass
 apiVersion: objectstorage.k8s.io/v1alpha1
 metadata:
-  name: bucket-1-name-prefix # name of the bucket
+  name: my-bucket-class # bucket prefix, followed by UUID for every bucket claim
 driverName: cosi.scality.com
-deletionPolicy: Delete
+deletionPolicy: Retain
 parameters:
   COSI_OBJECT_STORAGE_PROVIDER_SECRET_NAME: s3-secret-for-cosi
   COSI_OBJECT_STORAGE_PROVIDER_SECRET_NAMESPACE: default
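
With the class name acting as a bucket-name prefix and deletionPolicy now set to Retain, buckets outlive their claims. The generated names can be listed on both sides; a sketch reusing the endpoint and query from the test script:

  # Kubernetes-side Bucket objects created for this class
  kubectl get buckets
  # S3-side buckets whose names start with the class name
  aws --endpoint-url "http://localhost:8000" s3api list-buckets \
    --query "Buckets[?starts_with(Name, 'my-bucket-class')].Name" --output text
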
4 changes: 2 additions & 2 deletions cosi-examples/s3-secret-for-cosi.yaml
@@ -5,7 +5,7 @@ metadata:
   namespace: default
 type: Opaque
 stringData:
-  COSI_S3_ACCESS_KEY_ID: accessKey1 # Plain text access key
-  COSI_S3_SECRET_ACCESS_KEY: verySecretKey1 # Plain text secret key
+  COSI_S3_ACCESS_KEY_ID: PBUOB68AVF39EVVAFNFL # Plain text access key, generated in the CI
+  COSI_S3_SECRET_ACCESS_KEY: P+PK+uMB9spUc21huaQoOexqdJoV00tSnl+pc7t7 # Plain text secret key
   COSI_S3_ENDPOINT: http://localhost:8000 # Plain text endpoint
   COSI_S3_REGION: us-west-1 # Plain text region
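
The same Secret can also be produced imperatively instead of editing the example file. A sketch using the CI-generated credentials shown above; swap the endpoint for the host IP when targeting the kind cluster, as the test script does:

  kubectl create secret generic s3-secret-for-cosi \
    --namespace default \
    --from-literal=COSI_S3_ACCESS_KEY_ID=PBUOB68AVF39EVVAFNFL \
    --from-literal=COSI_S3_SECRET_ACCESS_KEY='P+PK+uMB9spUc21huaQoOexqdJoV00tSnl+pc7t7' \
    --from-literal=COSI_S3_ENDPOINT=http://localhost:8000 \
    --from-literal=COSI_S3_REGION=us-west-1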
