Fix robot hackathon (#8943)
* Added missing type cast

* Workaround for cloud-native-robotz-hackathon/infrastructure#86

* Remove finalizer from backup object

* Remove empty lines
rbo authored Dec 24, 2024
1 parent acdc1db commit 3e2b874
Showing 1 changed file with 36 additions and 0 deletions.
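The workaround pins the OpenShift AI data-connection Secrets with a custom finalizer named after infrastructure issue #86 so they are not deleted out from under the workbench, while the new backup Secrets are created without a finalizer so they stay deletable. A finalizer like this blocks deletion until it is cleared again; a minimal cleanup sketch, assuming the kubernetes.core collection and assuming the primary secret is named "{{ ocp4_workload_cloud_native_robot_workbench_bucket_name }}-ai-connection" (that name is not shown in this diff):

- name: "Remove workaround finalizer from data connection - {{ ocp4_workload_cloud_native_robot_user }}"
  kubernetes.core.k8s_json_patch:
    kind: Secret
    # Secret name assumed from the backup naming convention used below
    name: "{{ ocp4_workload_cloud_native_robot_workbench_bucket_name }}-ai-connection"
    namespace: "{{ ocp4_workload_cloud_native_robot_user }}-ai"
    patch:
      # Dropping the finalizers list lets Kubernetes complete the delete
      - op: remove
        path: /metadata/finalizers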
@@ -64,6 +64,8 @@
apiVersion: v1
kind: Secret
metadata:
finalizers:
- github.com/cloud-native-robotz-hackathon_infrastructure_issues_86
annotations:
opendatahub.io/connection-type: s3
openshift.io/display-name: "{{ ocp4_workload_cloud_native_robot_workbench_bucket_name }}"
@@ -80,6 +82,22 @@
AWS_S3_ENDPOINT: "https://s3-openshift-storage.{{ openshift_cluster_ingress_domain }}"
AWS_SECRET_ACCESS_KEY: "{{ obc_secret.resources[0].data.AWS_SECRET_ACCESS_KEY | b64decode }}"
- name: "Create OpenShift AI DataConnection Backup - {{ ocp4_workload_cloud_native_robot_user }}"
kubernetes.core.k8s:
state: present
definition: |
apiVersion: v1
kind: Secret
metadata:
name: {{ ocp4_workload_cloud_native_robot_workbench_bucket_name }}-ai-connection-backup
namespace: "{{ ocp4_workload_cloud_native_robot_user }}-ai"
type: Opaque
stringData:
AWS_ACCESS_KEY_ID: "{{ obc_secret.resources[0].data.AWS_ACCESS_KEY_ID | b64decode }}"
AWS_DEFAULT_REGION: "us-east-1"
AWS_S3_BUCKET: "{{ obc_cm.resources[0].data.BUCKET_NAME }}"
AWS_S3_ENDPOINT: "https://s3-openshift-storage.{{ openshift_cluster_ingress_domain }}"
AWS_SECRET_ACCESS_KEY: "{{ obc_secret.resources[0].data.AWS_SECRET_ACCESS_KEY | b64decode }}"
# ---------------------------------------------------------
# Create Object Bucket & OpenShift AI Data Science Pipeline Server
@@ -133,6 +151,8 @@
apiVersion: v1
kind: Secret
metadata:
finalizers:
- github.com/cloud-native-robotz-hackathon_infrastructure_issues_86
annotations:
opendatahub.io/connection-type: s3
openshift.io/display-name: "data-science-pipeline-bucket"
@@ -149,6 +169,22 @@
AWS_S3_ENDPOINT: "https://s3-openshift-storage.{{ openshift_cluster_ingress_domain }}"
AWS_SECRET_ACCESS_KEY: "{{ obc_secret.resources[0].data.AWS_SECRET_ACCESS_KEY | b64decode }}"
- name: "Create data-science-pipeline-bucket-ai-connection Backup - {{ ocp4_workload_cloud_native_robot_user }}"
kubernetes.core.k8s:
state: present
definition: |
apiVersion: v1
kind: Secret
metadata:
name: data-science-pipeline-bucket-ai-connection-backup
namespace: "{{ ocp4_workload_cloud_native_robot_user }}-ai"
type: Opaque
stringData:
AWS_ACCESS_KEY_ID: "{{ obc_secret.resources[0].data.AWS_ACCESS_KEY_ID | b64decode }}"
AWS_DEFAULT_REGION: "us-east-1"
AWS_S3_BUCKET: "{{ obc_cm.resources[0].data.BUCKET_NAME }}"
AWS_S3_ENDPOINT: "https://s3-openshift-storage.{{ openshift_cluster_ingress_domain }}"
AWS_SECRET_ACCESS_KEY: "{{ obc_secret.resources[0].data.AWS_SECRET_ACCESS_KEY | b64decode }}"
- name: Start Data Science Pipeline Server
kubernetes.core.k8s:
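Not part of this commit, but as a quick post-run check one could confirm that a backup data connection was actually created; a sketch using kubernetes.core.k8s_info with the Secret name and namespace taken from the tasks above:

- name: "Verify pipeline backup data connection exists - {{ ocp4_workload_cloud_native_robot_user }}"
  kubernetes.core.k8s_info:
    api_version: v1
    kind: Secret
    name: data-science-pipeline-bucket-ai-connection-backup
    namespace: "{{ ocp4_workload_cloud_native_robot_user }}-ai"
  register: backup_connection
  # Fail the play if the backup Secret was not created
  failed_when: backup_connection.resources | length == 0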
