Merge pull request #731 from IBM/cpd500
Add support for Cloud Pak for Data 5.0+
fketelaars authored Jun 18, 2024
2 parents 369c142 + 903fc9f commit e1530b3
Showing 53 changed files with 933 additions and 222 deletions.
12 changes: 11 additions & 1 deletion Dockerfile
@@ -1,7 +1,12 @@
# Container image including olm-utils
ARG CPD_OLM_UTILS_V2_IMAGE
ARG CPD_OLM_UTILS_V3_IMAGE

FROM ${CPD_OLM_UTILS_V2_IMAGE}
FROM ${CPD_OLM_UTILS_V2_IMAGE} as olm-utils-v2
RUN cd /opt/ansible && \
tar czf /tmp/opt-ansible-v2.tar.gz *

FROM ${CPD_OLM_UTILS_V3_IMAGE} as olm-utils-v3

LABEL authors="Arthur Laimbock, \
Markus Wiegleb, \
@@ -35,6 +40,11 @@ RUN mkdir -p /cloud-pak-deployer && \
COPY . /cloud-pak-deployer/
COPY ./deployer-web/nginx.conf /etc/nginx/

COPY --from=olm-utils-v2 /tmp/opt-ansible-v2.tar.gz /olm-utils/

RUN cd /opt/ansible && \
tar czf /olm-utils/opt-ansible-v3.tar.gz *

# BUG with building wheel
#RUN pip3 install -r /cloud-pak-deployer/deployer-web/requirements.txt > /tmp/deployer-web-pip-install.out 2>&1
RUN pip3 install "cython<3.0.0" wheel && pip3 install PyYAML==6.0 --no-build-isolation && pip3 install -r /cloud-pak-deployer/deployer-web/requirements.txt > /tmp/deployer-web-pip-install.out 2>&1
9 changes: 8 additions & 1 deletion Dockerfile.ppc64le
@@ -1,7 +1,12 @@
# Container image including olm-utils
ARG CPD_OLM_UTILS_V2_IMAGE
ARG CPD_OLM_UTILS_V3_IMAGE

FROM ${CPD_OLM_UTILS_V2_IMAGE} as olm-utils-v2
RUN cd /opt/ansible && \
tar czf /tmp/opt-ansible-v2.tar.gz *

FROM ${CPD_OLM_UTILS_V3_IMAGE}

LABEL authors="Arthur Laimbock, \
Markus Wiegleb, \
@@ -33,8 +38,10 @@ RUN mkdir -p /cloud-pak-deployer && \
COPY . /cloud-pak-deployer/
COPY ./deployer-web/nginx.conf /etc/nginx/

COPY --from=olm-utils-v2 /tmp/opt-ansible-v2.tar.gz /olm-utils/

RUN cd /opt/ansible && \
tar czf /olm-utils/opt-ansible-v2.tar.gz *
tar czf /olm-utils/opt-ansible-v3.tar.gz *

# BUG with building wheel
#RUN pip3 install -r /cloud-pak-deployer/deployer-web/requirements.txt > /tmp/deployer-web-pip-install.out 2>&1
5 changes: 5 additions & 0 deletions automation-generators/generic/cp4d/preprocessor.py
@@ -225,6 +225,7 @@ def preprocessor(attributes=None, fullConfig=None, moduleVariables=None):
g = GeneratorPreProcessor(attributes,fullConfig,moduleVariables)

g('project').isRequired()
g('operators_project').isOptional()
g('openshift_cluster_name').expandWith('openshift[*]',remoteIdentifier='name')
g('cp4d_version').isRequired()
g('cartridges').isRequired()
@@ -273,6 +274,10 @@
if 'olm_utils' in ge and not 'sequential_install' in ge:
g('sequential_install').set(ge['olm_utils'])

# Set operators project to <project>-operators if not explicitly configured
if not 'operators_project' in ge:
g('operators_project').set('{}-operators'.format(ge['project']))

# Check reference
# - Retrieve the openshift element with name=openshift_cluster_name
# - Within the openshift element retrieve, there must be an openshift_storage element with the name cp4d.openshift_storage_name
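The optional operators_project attribute added above lets the Cloud Pak for Data operators be installed into a namespace other than the default <project>-operators. A minimal sketch of a cp4d configuration entry that uses it; the attribute names follow the preprocessor checks shown in this diff, while the concrete values (cluster name, version, cartridge list) are illustrative only:

cp4d:
- project: cpd
  operators_project: cpd-operators       # optional, defaults to <project>-operators
  openshift_cluster_name: sample-cluster
  cp4d_version: 5.0.0
  cartridges:
  - name: cp-foundation
  - name: lite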
@@ -94,4 +94,10 @@
include_role:
name: openshift-gpu
vars:
_p_openshift_cluster: "{{ current_openshift_cluster }}"
_p_openshift_cluster: "{{ current_openshift_cluster }}"

- name: Configure OpenShift AI
include_role:
name: openshift-ai
vars:
_p_openshift_cluster: "{{ current_openshift_cluster }}"
@@ -0,0 +1,51 @@
---
- name: Generate OpenShift AI operator {{ status_dir }}/openshift/openshift-{{ _p_openshift_cluster.name }}-openshift-ai-operator.yaml
  template:
    src: openshift-ai-operator.j2
    dest: "{{ status_dir }}/openshift/openshift-{{ _p_openshift_cluster.name }}-openshift-ai-operator.yaml"

- name: Apply yaml for OpenShift AI operator
  shell: |
    oc apply -f {{ status_dir }}/openshift/openshift-{{ _p_openshift_cluster.name }}-openshift-ai-operator.yaml

- name: Wait until OpenShift AI operator CSV has status Succeeded
  shell: |
    oc get csv -n redhat-ods-operator \
      -l operators.coreos.com/rhods-operator.redhat-ods-operator \
      --no-headers \
      -o custom-columns='name:metadata.name,phase:status.phase' | \
      grep -i succeeded | wc -l
  register: _openshift_ai_csv_status
  retries: 30
  delay: 30
  until: _openshift_ai_csv_status.stdout == "1"
  vars:
    ansible_callback_diy_runner_retry_msg: >-
      {%- set result = ansible_callback_diy.result.output -%}
      {%- set retries_left = result.retries - result.attempts -%}
      Retrying: {{ ansible_callback_diy.task.name }} ({{ retries_left }} Retries left) ...

- name: Generate OpenShift AI DataScienceCluster {{ status_dir }}/openshift/openshift-{{ _p_openshift_cluster.name }}-openshift-ai-dsc.yaml
  template:
    src: datasciencecluster.j2
    dest: "{{ status_dir }}/openshift/openshift-{{ _p_openshift_cluster.name }}-openshift-ai-dsc.yaml"

- name: Apply yaml for OpenShift AI DataScienceCluster
  shell: |
    oc apply -f {{ status_dir }}/openshift/openshift-{{ _p_openshift_cluster.name }}-openshift-ai-dsc.yaml

- name: Wait until OpenShift AI DataScienceCluster default-dsc is ready
  shell: |
    oc get DataScienceCluster default-dsc \
      --no-headers \
      -o custom-columns='name:metadata.name,phase:status.phase' | \
      grep -i ready | wc -l
  register: _openshift_ai_dsc_status
  retries: 30
  delay: 30
  until: _openshift_ai_dsc_status.stdout == "1"
  vars:
    ansible_callback_diy_runner_retry_msg: >-
      {%- set result = ansible_callback_diy.result.output -%}
      {%- set retries_left = result.retries - result.attempts -%}
      Retrying: {{ ansible_callback_diy.task.name }} ({{ retries_left }} Retries left) ...
@@ -0,0 +1,3 @@
---
- include_tasks: install-openshift-ai.yml
  when: _p_openshift_cluster.openshift_ai.install | default(False) | bool
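The role only runs when openshift_ai.install is set on the OpenShift cluster definition, and the operator subscription channel can be overridden per cluster. A hedged sketch of the corresponding configuration, inferred from the variables referenced in this role; the cluster name and any attributes not shown in this diff are illustrative:

openshift:
- name: sample-cluster
  openshift_ai:
    install: True
    channel: stable       # subscription channel, defaults to stable when omitted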
@@ -0,0 +1,35 @@
---
apiVersion: datasciencecluster.opendatahub.io/v1
kind: DataScienceCluster
metadata:
  name: default-dsc
  labels:
    app.kubernetes.io/name: datasciencecluster
    app.kubernetes.io/instance: default-dsc
    app.kubernetes.io/part-of: rhods-operator
    app.kubernetes.io/managed-by: kustomize
    app.kubernetes.io/created-by: rhods-operator
spec:
  components:
    codeflare:
      managementState: Managed
    kserve:
      serving:
        ingressGateway:
          certificate:
            type: SelfSigned
        managementState: Managed
        name: knative-serving
      managementState: Managed
    ray:
      managementState: Managed
    kueue:
      managementState: Managed
    workbenches:
      managementState: Managed
    dashboard:
      managementState: Managed
    modelmeshserving:
      managementState: Managed
    datasciencepipelines:
      managementState: Managed
@@ -0,0 +1,42 @@
---
apiVersion: v1
kind: Namespace
metadata:
  name: redhat-ods-operator
---
apiVersion: v1
kind: Namespace
metadata:
  name: redhat-ods-monitoring
---
apiVersion: v1
kind: Namespace
metadata:
  name: redhat-ods-applications
---
apiVersion: v1
kind: Namespace
metadata:
  name: rhods-notebooks
---
apiVersion: operators.coreos.com/v1
kind: OperatorGroup
metadata:
  name: redhat-ods-operator
  namespace: redhat-ods-operator
spec:
  upgradeStrategy: Default
---
apiVersion: operators.coreos.com/v1alpha1
kind: Subscription
metadata:
  labels:
    operators.coreos.com/rhods-operator.redhat-ods-operator: ""
  name: rhods-operator
  namespace: redhat-ods-operator
spec:
  channel: {{ _p_openshift_cluster.openshift_ai.channel | default('stable') }}
  installPlanApproval: Automatic
  name: rhods-operator
  source: redhat-operators
  sourceNamespace: openshift-marketplace
@@ -0,0 +1,8 @@
---
- name: Insert watsonx_ai options into {{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-install-options.yml
  blockinfile:
    path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-install-options.yml"
    marker: "# {mark} watsonx_ai options #"
    block: |2
      watsonx_ai:
        tuning_disabled: {{ _current_cp4d_cartridge.installation_options.tuning_disabled | default(false) }}
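The tuning_disabled value comes from the cartridge's installation_options in the deployer configuration, under the cp4d cluster entry. An illustrative cartridge entry; only installation_options.tuning_disabled is defined by this change, the surrounding cartridge syntax is assumed from the existing configuration format:

cartridges:
- name: watsonx_ai
  state: installed                # assumed cartridge attribute
  installation_options:
    tuning_disabled: True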
@@ -2,6 +2,6 @@
--release={{ _p_current_cp4d_cluster.cp4d_version }} \
--case_download=false \
--catsrc=false --sub=true \
--cpd_operator_ns={{ _p_current_cp4d_cluster.operators_project | default('cpd-operators') }} \
--cpd_operator_ns={{ _p_current_cp4d_cluster.operators_project }} \
--upgrade={%- if _upgrade_cp4d -%}true{%- else -%}false{%- endif %} \
--components=watsonx_orchestrate
@@ -1,8 +1,4 @@
---
- name: Create catalog sources from case files in {{ status_dir }}/cp4d/case using OLM utils
debug:
msg: ""

- name: Ensure that OLM utils work directory exists
file:
path: /tmp/work
@@ -21,7 +17,7 @@
when: (cpd_airgap | default(False) | bool)

# Always generate the preview script to log what will be done
- name: Generate command preview script to create catalog sources
- name: Generate command preview script to create catalog sources for Cloud Pak for Data {{ _p_catsrc_scope }}
set_fact:
_apply_olm_command_script: "{{ lookup('template', 'apply-olm-create-catsrc.j2') }}"
vars:
@@ -31,67 +27,67 @@
debug:
var: _apply_olm_command_script

- name: Write script to "{{ status_dir }}/cp4d/apply-olm-catsrc.sh"
- name: Write script to "{{ status_dir }}/cp4d/apply-olm-catsrc-{{ _p_catsrc_scope }}.sh"
copy:
content: "{{ _apply_olm_command_script }}"
dest: "{{ status_dir }}/cp4d/apply-olm-catsrc.sh"
dest: "{{ status_dir }}/cp4d/apply-olm-catsrc-{{ _p_catsrc_scope }}.sh"

- name: Generate preview script to create catalog sources, logs are in {{ status_dir }}/log/apply-olm-create-catsrc.log
- name: Generate preview script to create catalog sources, logs are in {{ status_dir }}/log/apply-olm-create-catsrc-{{ _p_catsrc_scope }}.log
shell: |
{{ _apply_olm_command_script }} > {{ status_dir }}/log/apply-olm-create-catsrc.log 2>&1
{{ _apply_olm_command_script }} > {{ status_dir }}/log/apply-olm-create-catsrc-{{ _p_catsrc_scope }}.log 2>&1
- name: Copy script to {{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc.sh
- name: Copy script to {{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc-{{ _p_catsrc_scope }}.sh
copy:
src: "/tmp/work/preview.sh"
dest: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc.sh"
dest: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc-{{ _p_catsrc_scope }}.sh"
remote_src: True
mode: u+rwx

# TODO: Remove step once problem in preview.sh is fixed
- name: Update script to fix invalid oc apply -f commands
replace:
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc.sh"
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc-{{ _p_catsrc_scope }}.sh"
regexp: '^(.*)oc apply -f << EOF(.*)'
replace: 'oc apply -f - << EOF'

# TODO: Remove step once problem in olm-utils is fixed
- name: Update script to fix invalid cpd-platform catalog source image
replace:
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc.sh"
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc-{{ _p_catsrc_scope }}.sh"
regexp: '^ image: icr.io/cpopen/ibm-cpd-platform-operator-catalog@sha256:953403f1d7193fedb81186ec454fae3ea0852ef4c1929c3c56f12352189b1766'
replace: ' image: icr.io/cpopen/ibm-cpd-platform-operator-catalog@sha256:54d3d7aff34444eb1991335831c18272ad217a6445f898e22f0b30f539b8c7cf'

# TODO: Remove step once problem in olm-utils is fixed
- name: Update script to fix invalid ws-pipelines create catalog source command
replace:
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc.sh"
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc-{{ _p_catsrc_scope }}.sh"
regexp: '(ibm-ws-pipelines-\d.\d.\d)(.tgz)'
replace: '\1*\2'

# TODO: Remove step once problem in olm-utils is fixed
- name: Update script to fix invalid replication create catalog source command
replace:
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc.sh"
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc-{{ _p_catsrc_scope }}.sh"
regexp: '(ibm-replication-\d.\d.\d)(.tgz)'
replace: '\1*\2'

# TODO: Remove step once problem in olm-utils is fixed
- name: Update script to fix invalid DataGate create catalog source command
replace:
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc.sh"
path: "{{ status_dir }}/cp4d/{{ _p_current_cp4d_cluster.project }}-create-catsrc-{{ _p_catsrc_scope }}.sh"
regexp: '(ibm-datagate-prod-\d.\d.\d)(.tgz)'
replace: '\1*\2'

- name: Run apply-olm command to create catalog sources
- name: Run apply-olm command to create catalog sources for Cloud Pak for Data {{ _p_catsrc_scope }}
block:
- name: Generate OLM command to create catalog sources
- name: Generate OLM command to create catalog sources for Cloud Pak for Data {{ _p_catsrc_scope }}
set_fact:
_apply_olm_command: "{{ lookup('template', 'apply-olm-create-catsrc.j2') }}"
vars:
_p_preview_script: False
- name: Run apply-olm command to create catalog sources, logs are in {{ status_dir }}/log/apply-olm-create-catsrc.log
- name: Run apply-olm command to create catalog sources, logs are in {{ status_dir }}/log/apply-olm-create-catsrc-{{ _p_catsrc_scope }}.log
shell: |
{{ _apply_olm_command }} > {{ status_dir }}/log/apply-olm-create-catsrc.log 2>&1
{{ _apply_olm_command }} > {{ status_dir }}/log/apply-olm-create-catsrc-{{ _p_catsrc_scope }}.log 2>&1
- name: If not air-gapped, copy case files from /tmp/work/offline to {{ status_dir }}/cp4d/offline
copy:
@@ -1,9 +1,11 @@
---
- include_role:
name: cp4d-variables
- set_fact:
_catsrc_cartridges_to_install_list: "{{ _cartridges_to_install_list }}"

- set_fact:
_catsrc_cartridges_to_install_list: "cpfs,cpd_platform"
when: _p_catsrc_scope == 'platform'

- include_tasks: create-catalog-source-olm-utils.yml
when:
- _p_current_cp4d_cluster.cp4d_version >= "4.5.0"

- include_tasks: wait-catalog-sources-ready.yml
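With the new _p_catsrc_scope parameter, the same tasks can create either only the platform catalog sources (cpfs and cpd_platform) or the catalog sources for all cartridges selected for installation. A hypothetical invocation, assuming the role and variable names used by its callers:

- include_role:
    name: cp4d-catalog-source              # assumed role name, not shown in this diff
  vars:
    _p_current_cp4d_cluster: "{{ current_cp4d_cluster }}"   # assumed outer variable
    _p_catsrc_scope: platform              # 'platform' limits the list to cpfs,cpd_platform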
@@ -2,7 +2,7 @@
--release={{ _p_current_cp4d_cluster.cp4d_version }} \
--case_download={%- if cpd_airgap | default(False) | bool -%}false{%- else -%}true{%- endif %} \
--catsrc=true --sub=false \
--cpd_operator_ns={{ _p_current_cp4d_cluster.operators_project | default('cpd-operators') }} \
--cpd_operator_ns={{ _p_current_cp4d_cluster.operators_project }} \
--preview={%- if _p_preview_script -%}true{%- else -%}false{%- endif %} \
--upgrade={%- if _upgrade_cp4d -%}true{%- else -%}false{%- endif %} \
--components={{ _cartridges_to_install_list }}
--components={{ _catsrc_cartridges_to_install_list }}