diff --git a/image/cli/masfvt/fvt-post-update.yml b/image/cli/masfvt/fvt-post-update.yml new file mode 100644 index 00000000000..3803cbbcb07 --- /dev/null +++ b/image/cli/masfvt/fvt-post-update.yml @@ -0,0 +1,101 @@ +--- +- hosts: localhost + any_errors_fatal: true + vars: + # Image Pull Policy + image_pull_policy: "{{ lookup('env', 'IMAGE_PULL_POLICY') }}" + mas_instance_id: "{{ lookup('env', 'MAS_INSTANCE_ID') }}" + tasks: + # 1. Check if manage is installed + # ----------------------------------------------------------------------------- + - name: "Check if ManageWorkspace exists" + kubernetes.core.k8s_info: + kind: ManageWorkspace + namespace: "mas-{{ mas_instance_id }}-manage" + label_selectors: + - mas.ibm.com/instanceId={{ mas_instance_id }} + register: manage_ws + + - name: 'Set fact manage exists' + set_fact: + manage_exists: true + when: + - manage_ws is defined + - manage_ws.resources is defined + - manage_ws.resources | length>0 + + - block: + # 2. Lookup the Operator Version + # ----------------------------------------------------------------------------- + - name: "upgrade : Lookup OperatorCondition for Manage" + kubernetes.core.k8s_info: + api_version: operators.coreos.com/v2 + kind: OperatorCondition + namespace: "mas-{{ mas_instance_id }}-manage" + label_selectors: + - "operators.coreos.com/ibm-mas-manage.mas-{{ mas_instance_id }}-manage" + register: updated_opcon + retries: 10 + delay: 120 # 2 minutes + until: + - updated_opcon.resources is defined + - updated_opcon.resources | length == 1 + - updated_opcon.resources[0].metadata.name is defined + + - name: "upgrade : Debug OperatorCondition" + debug: + var: updated_opcon + + - name: "upgrade : Lookup operator version for Manage" + set_fact: + updated_opcon_version: "{{ updated_opcon.resources[0].metadata.name.split('.v')[1] | ibm.mas_devops.format_pre_version_with_buildid }}" + + # 3. 
Check that the Application CR meets the required state + # ----------------------------------------------------------------------------- + - name: "Check if ManageApp CR is updated to latest version and Ready (120s delay)" + kubernetes.core.k8s_info: + kind: "ManageApp" + name: "{{ mas_instance_id }}" + namespace: "mas-{{ mas_instance_id }}-manage" + retries: 20 # about 40 minutes + delay: 120 # 2 minutes + until: + - updated_app_info.resources is defined + - updated_app_info.resources[0].status is defined + - updated_app_info.resources[0].status.versions.reconciled == updated_opcon_version + - updated_app_info.resources | json_query('[*].status.conditions[?type==`Ready`][].reason') | select ('match','Ready') | list | length == 1 + register: updated_app_info + + # 4. Check that the Application Workspaces meet the required state + # ----------------------------------------------------------------------------- + - name: 'Check if workspace CR reconciled version is updated to target version (120s delay)' + kubernetes.core.k8s_info: + kind: "ManageWorkspace" + namespace: "mas-{{ mas_instance_id }}-manage" + label_selectors: + - mas.ibm.com/instanceId={{ mas_instance_id }} + retries: 30 # about 60 minutes + delay: 120 # 2 minutes + until: + - app_ws_cr_version.resources is defined + - app_ws_cr_version.resources[0].status.versions.reconciled == updated_opcon_version + register: app_ws_cr_version + + - name: 'Check if ManageWorkspace CR is Ready (120s delay)' + kubernetes.core.k8s_info: + kind: "ManageWorkspace" + namespace: "mas-{{ mas_instance_id }}-manage" + label_selectors: + - mas.ibm.com/instanceId={{ mas_instance_id }} + retries: 60 # about 120 minutes + delay: 120 # 2 minutes + until: + - app_ws_cr_lookup.resources is defined + - app_ws_cr_lookup.resources | json_query('[*].status.conditions[?type==`Ready`][].reason') | select ('in', ['Ready', 'WorkspaceReady']) | list | length == app_ws_cr_lookup.resources | length + - app_ws_cr_lookup.resources | 
json_query('[*].status.conditions[?type==`Running`][].reason') | select ('in', ['Successful']) | list | length == app_ws_cr_lookup.resources | length + - app_ws_cr_lookup.resources | json_query('[*].status.conditions[?type==`DeploymentCR`][].reason') | select ('in', ['Successful']) | list | length == app_ws_cr_lookup.resources | length + register: app_ws_cr_lookup + + when: + - manage_exists is defined + - manage_exists diff --git a/image/cli/masfvt/fvt-pre-update.yml b/image/cli/masfvt/fvt-pre-update.yml new file mode 100644 index 00000000000..4ee9fd50924 --- /dev/null +++ b/image/cli/masfvt/fvt-pre-update.yml @@ -0,0 +1,34 @@ +--- +- hosts: localhost + any_errors_fatal: true + vars: + # Image Pull Policy + image_pull_policy: "{{ lookup('env', 'IMAGE_PULL_POLICY') }}" + mas_instance_id: "{{ lookup('env', 'MAS_INSTANCE_ID') }}" + tasks: + - name: 'Check if ManageWorkspace exists' + kubernetes.core.k8s_info: + kind: ManageWorkspace + namespace: "mas-{{ mas_instance_id }}-manage" + label_selectors: + - mas.ibm.com/instanceId={{ mas_instance_id }} + register: manage_ws + + - name: "Lookup upgradeType" + set_fact: + manage_upgrade_type: "{{ manage_ws.resources | json_query('[*].spec.settings.db.upgrade.upgradeType') }}" + when: + - manage_ws is defined + - manage_ws.resources is defined + - manage_ws.resources | length>0 + + - name: "Create or update manage offline upgrade check cron job" + kubernetes.core.k8s: + apply: true + template: templates/mas-fvt-offline-upgrade.yml.j2 + state: present + when: + - manage_ws is defined + - manage_ws.resources is defined + - manage_ws.resources | length>0 + - '"onlineUpgrade" in manage_upgrade_type' diff --git a/image/cli/masfvt/fvt-pre-upgrade.yml b/image/cli/masfvt/fvt-pre-upgrade.yml new file mode 100644 index 00000000000..cf6c4ab093d --- /dev/null +++ b/image/cli/masfvt/fvt-pre-upgrade.yml @@ -0,0 +1,42 @@ +--- +- hosts: localhost + any_errors_fatal: true + vars: + # Image Pull Policy + image_pull_policy: "{{ lookup('env', 
'IMAGE_PULL_POLICY') }}" + mas_instance_id: "{{ lookup('env', 'MAS_INSTANCE_ID') }}" + tasks: + - name: 'Check if ManageWorkspace exists' + kubernetes.core.k8s_info: + kind: ManageWorkspace + namespace: "mas-{{ mas_instance_id }}-manage" + label_selectors: + - mas.ibm.com/instanceId={{ mas_instance_id }} + register: manage_ws + + - name: "Check if cronjob already exists" + kubernetes.core.k8s_info: + api_version: batch/v1 + kind: CronJob + name: fvt-manage-offline-upgrade-check + namespace: "mas-{{ mas_instance_id }}-manage" + register: offline_upgrade_job + + - name: "Lookup upgradeType" + set_fact: + manage_upgrade_type: "{{ manage_ws.resources | json_query('[*].spec.settings.db.upgrade.upgradeType') }}" + when: + - manage_ws is defined + - manage_ws.resources is defined + - manage_ws.resources | length>0 + + - name: "Create or update manage offline upgrade check cron job" + kubernetes.core.k8s: + apply: true + template: templates/mas-fvt-offline-upgrade.yml.j2 + state: present + when: + - manage_ws is defined and offline_upgrade_job is defined + - manage_ws.resources is defined and offline_upgrade_job.resources is defined + - manage_ws.resources | length>0 and offline_upgrade_job.resources | length==0 + - '"onlineUpgrade" in manage_upgrade_type' diff --git a/image/cli/masfvt/templates/mas-fvt-offline-upgrade.yml.j2 b/image/cli/masfvt/templates/mas-fvt-offline-upgrade.yml.j2 new file mode 100644 index 00000000000..42fb25a78f0 --- /dev/null +++ b/image/cli/masfvt/templates/mas-fvt-offline-upgrade.yml.j2 @@ -0,0 +1,139 @@ +--- +# Permit outbound communication by the Job pods +kind: NetworkPolicy +apiVersion: networking.k8s.io/v1 +metadata: + name: fvt-manage-{{ mas_instance_id }}-np + namespace: mas-{{ mas_instance_id }}-manage +spec: + podSelector: + egress: + - {} + policyTypes: + - Egress + +--- +# Service account that is authorized to read k8s secrets (needed by the job) +apiVersion: v1 +kind: ServiceAccount +metadata: + name: fvt-manage-{{ mas_instance_id 
}}-sa + namespace: mas-{{ mas_instance_id }}-manage + +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: fvt-manage-{{ mas_instance_id }}-role + namespace: mas-{{ mas_instance_id }}-manage +rules: + - verbs: + - get + - list + - patch + apiGroups: + - "apps.mas.ibm.com" + resources: + - manageworkspaces + - manageofflineupgraderequests + - manageofflineupgraderequests/status + +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: fvt-manage-{{ mas_instance_id }}-rb + namespace: mas-{{ mas_instance_id }}-manage +subjects: + - kind: ServiceAccount + name: fvt-manage-{{ mas_instance_id }}-sa + namespace: mas-{{ mas_instance_id }}-manage +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: fvt-manage-{{ mas_instance_id }}-role + +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: fvt-manage-offline-upgrade-check + namespace: mas-{{ mas_instance_id }}-manage +spec: + schedule: "*/5 * * * *" + concurrencyPolicy: Forbid + suspend: false + jobTemplate: + spec: + template: + spec: + containers: + - name: run + image: quay.io/ibmmas/cli:latest + imagePullPolicy: IfNotPresent + resources: + limits: + cpu: 200m + memory: 512Mi + requests: + cpu: 10m + memory: 64Mi + env: + - name: MAS_INSTANCE_ID + value: {{ mas_instance_id }} + command: + - /bin/sh + - -c + - | + + set -e + + echo + echo "================================================================================" + echo "Check if we need to set the ManageOfflineUpgradeRequest stage to Requested" + echo "================================================================================" + echo + + MAS_APP_NAMESPACE=mas-${MAS_INSTANCE_ID}-manage + + MANAGE_WORKSPACE_NAME=$(oc get ManageWorkspace -n ${MAS_APP_NAMESPACE} -l mas.ibm.com/instanceId=${MAS_INSTANCE_ID} -o name) + + # Check ManageWorkspace Exists + if [[ -n $MANAGE_WORKSPACE_NAME ]]; then + MANAGE_UPGRADE_TYPE=$(oc get -n ${MAS_APP_NAMESPACE} ${MANAGE_WORKSPACE_NAME} 
-o=jsonpath="{.spec.settings.db.upgrade.upgradeType}") + + # Check if upgrade type of manage is online upgrade or regular upgrade + if [[ $MANAGE_UPGRADE_TYPE == *"onlineUpgrade"* ]]; then + ONLINE_UPDATE_DONE_MSG="Database online upgrade portion is done, waiting for offline request." + MANAGE_DEPLOYMENTREADY_MSG=$(oc get -n ${MAS_APP_NAMESPACE} ${MANAGE_WORKSPACE_NAME} -o=jsonpath="{.status.conditions[?(@.type=='DeploymentReady')].message}") + + # Check the status of offline upgrade if the patch is completed + if [[ $MANAGE_DEPLOYMENTREADY_MSG == *"$ONLINE_UPDATE_DONE_MSG"* ]]; then + echo + echo "Status found is: ${MANAGE_DEPLOYMENTREADY_MSG}" + echo "ManageWorkspace indicates it is ready for offline upgrade, patching manageofflineupgraderequest CR to requested, and removing old status if present" + UPGRADE_REQUEST_STATUS=$(oc get -n ${MAS_APP_NAMESPACE} manageofflineupgraderequest.apps.mas.ibm.com -l mas.ibm.com/instanceId=${MAS_INSTANCE_ID} -o=jsonpath="{.items[*].status}") + UPGRADE_REQUEST_NAME=$(oc get ManageOfflineUpgradeRequest -n ${MAS_APP_NAMESPACE} -l mas.ibm.com/instanceId=${MAS_INSTANCE_ID} -o name) + if [[ -n $UPGRADE_REQUEST_STATUS ]]; then + oc patch -n ${MAS_APP_NAMESPACE} ${UPGRADE_REQUEST_NAME} --subresource status --type=json -p="[{'op': 'remove', 'path': '/status'}]" + fi + oc patch -n ${MAS_APP_NAMESPACE} ${UPGRADE_REQUEST_NAME} --type merge -p $'spec:\n stage: requested' + echo "Patch complete for manageofflineupgraderequest CR" + echo + + else + echo + echo "The ManageWorkspace Status does not indicate it is ready for offlineupgrade, so do nothing" + echo "Final Status found is: ${MANAGE_DEPLOYMENTREADY_MSG}" + fi + else + echo + echo "Upgrade type is: ${MANAGE_UPGRADE_TYPE}." + echo "Upgrade type for manage is not onlineUpgrade, so skipping the step to patch manageofflineupgraderequest CR."
+ fi + else + echo + echo "ManageWorkspace not found with instance id: ${MAS_INSTANCE_ID} in namespace: ${MAS_APP_NAMESPACE}, so skipping the step to patch manageofflineupgraderequest CR." + fi + restartPolicy: Never + serviceAccountName: fvt-manage-{{ mas_instance_id }}-sa diff --git a/python/src/mas/cli/install/argBuilder.py b/python/src/mas/cli/install/argBuilder.py index 7887d2ba514..abc83431e64 100644 --- a/python/src/mas/cli/install/argBuilder.py +++ b/python/src/mas/cli/install/argBuilder.py @@ -237,6 +237,9 @@ def buildCommand(self) -> str: if self.getParam('mas_appws_bindings_health_wsl_flag') == "true": command += f" --manage-health-wsl{newline}" + if self.getParam('mas_appws_upgrade_type') != "": + command += f" --manage-upgrade-type \"{self.getParam('mas_appws_upgrade_type')}\"{newline}" + # Facilities Advanced Settings # ----------------------------------------------------------------------------- # TODO: Fix type for storage sizes and max conn pool size diff --git a/python/src/mas/cli/install/argParser.py b/python/src/mas/cli/install/argParser.py index 934c59b97aa..ba404cbb1ac 100644 --- a/python/src/mas/cli/install/argParser.py +++ b/python/src/mas/cli/install/argParser.py @@ -550,6 +550,15 @@ def isValidFile(parser, arg) -> str: help="Manage server timezone. Default is `GMT`" ) +manageArgGroup.add_argument( + "--manage-upgrade-type", + dest="mas_appws_upgrade_type", + required=False, + help="Set Manage upgrade type. 
Default is `regularUpgrade`", + default="regularUpgrade", + choices=["regularUpgrade", "onlineUpgrade"] +) + # Manage Attachments # ----------------------------------------------------------------------------- manageArgGroup.add_argument( diff --git a/python/src/mas/cli/install/params.py b/python/src/mas/cli/install/params.py index e1de04d609c..8e23bc4c179 100644 --- a/python/src/mas/cli/install/params.py +++ b/python/src/mas/cli/install/params.py @@ -61,6 +61,7 @@ "mas_appws_components", "mas_appws_bindings_health_wsl_flag", "mas_domain", + "mas_appws_upgrade_type", # IPV6 "enable_ipv6", # SLS diff --git a/python/src/mas/cli/install/summarizer.py b/python/src/mas/cli/install/summarizer.py index 720abf99d28..11505743898 100644 --- a/python/src/mas/cli/install/summarizer.py +++ b/python/src/mas/cli/install/summarizer.py @@ -178,6 +178,8 @@ def manageSummary(self) -> None: self.printSummary(" + Workday Applications", "Enabled" if "workday=" in self.getParam("mas_appws_components") else "Disabled") self.printSummary(" + AIP", "Enabled" if "aip=" in self.getParam("mas_appws_components") else "Disabled") + self.printParamSummary("+ Upgrade Type", "mas_appws_upgrade_type") + self.printParamSummary("+ Server bundle size", "mas_app_settings_server_bundles_size") self.printParamSummary("+ Enable JMS queues", "mas_app_settings_default_jms") self.printParamSummary("+ Server Timezone", "mas_app_settings_server_timezone") diff --git a/tekton/generate-tekton-tasks.yml b/tekton/generate-tekton-tasks.yml index a9703b5684d..b25ab4b9751 100644 --- a/tekton/generate-tekton-tasks.yml +++ b/tekton/generate-tekton-tasks.yml @@ -89,6 +89,9 @@ - fvt-start-update - fvt-start-upgrade - fvt-start-uninstall + - fvt-pre-update + - fvt-pre-upgrade + - fvt-post-update - name: Generate Tasks (FVT Launchers) ansible.builtin.template: diff --git a/tekton/src/params/install.yml.j2 b/tekton/src/params/install.yml.j2 index 5694643a11c..c91fe97c83e 100644 --- a/tekton/src/params/install.yml.j2 +++ 
b/tekton/src/params/install.yml.j2 @@ -655,6 +655,10 @@ type: string description: COS Bucket optionally used to hold attachments in Manage default: "" +- name: mas_appws_upgrade_type + type: string + description: Manage upgrade type + default: "regularUpgrade" # MAS Application Configuration - IBM Maximo Location Services for ESRI # ----------------------------------------------------------------------------- diff --git a/tekton/src/pipelines/fvt-launcher.yml.j2 b/tekton/src/pipelines/fvt-launcher.yml.j2 index 3ff258991cb..03a110b00a4 100644 --- a/tekton/src/pipelines/fvt-launcher.yml.j2 +++ b/tekton/src/pipelines/fvt-launcher.yml.j2 @@ -901,6 +901,23 @@ spec: # Update Sync Point # ------------------------------------------------------------------------- + - name: pre-update + timeout: "0" + taskRef: + kind: Task + name: mas-pre-update + params: + - name: image_pull_policy + value: $(params.image_pull_policy) + - name: mas_instance_id + value: $(params.mas_instance_id) + when: + - input: $(params.sync_with_update) + operator: in + values: ["true", "True"] + runAfter: + - waitfor-install + - name: start-update timeout: "0" taskRef: @@ -911,7 +928,7 @@ spec: operator: in values: ["true", "True"] runAfter: - - waitfor-install + - pre-update - name: waitfor-update timeout: "0" @@ -934,9 +951,43 @@ spec: runAfter: - start-update + - name: post-update + timeout: "0" + taskRef: + kind: Task + name: mas-post-update + params: + - name: image_pull_policy + value: $(params.image_pull_policy) + - name: mas_instance_id + value: $(params.mas_instance_id) + when: + - input: $(params.sync_with_update) + operator: in + values: ["true", "True"] + runAfter: + - waitfor-update + # Upgrade Sync Point # ------------------------------------------------------------------------- + - name: pre-upgrade + timeout: "0" + taskRef: + kind: Task + name: mas-pre-upgrade + params: + - name: image_pull_policy + value: $(params.image_pull_policy) + - name: mas_instance_id + value: 
$(params.mas_instance_id) + when: + - input: $(params.sync_with_upgrade) + operator: in + values: ["true", "True"] + runAfter: + - post-update + - name: start-upgrade timeout: "0" taskRef: @@ -952,7 +1003,7 @@ spec: operator: in values: ["true", "True"] runAfter: - - waitfor-update + - pre-upgrade - name: waitfor-upgrade timeout: "0" diff --git a/tekton/src/pipelines/taskdefs/apps/manage-workspace.yml.j2 b/tekton/src/pipelines/taskdefs/apps/manage-workspace.yml.j2 index fc24a7e80f3..d23b2d91418 100644 --- a/tekton/src/pipelines/taskdefs/apps/manage-workspace.yml.j2 +++ b/tekton/src/pipelines/taskdefs/apps/manage-workspace.yml.j2 @@ -92,6 +92,8 @@ value: $(params.cos_instance_name) - name: cos_bucket_name value: $(params.cos_bucket_name) + - name: mas_appws_upgrade_type + value: $(params.mas_appws_upgrade_type) # Custom Label Support - name: custom_labels diff --git a/tekton/src/pipelines/update.yml.j2 b/tekton/src/pipelines/update.yml.j2 index e94442a4df2..7c4d2890030 100644 --- a/tekton/src/pipelines/update.yml.j2 +++ b/tekton/src/pipelines/update.yml.j2 @@ -455,4 +455,4 @@ spec: - name: configmap_value # An aggregate status of all the pipelineTasks under the tasks section (excluding the finally section). 
# This variable is only available in the finally tasks and can have any one of the values (Succeeded, Failed, Completed, or None) - value: $(tasks.status) + value: $(tasks.status) \ No newline at end of file diff --git a/tekton/src/tasks/fvt/fvt-post-update.yml.j2 b/tekton/src/tasks/fvt/fvt-post-update.yml.j2 new file mode 100644 index 00000000000..21d82f58af5 --- /dev/null +++ b/tekton/src/tasks/fvt/fvt-post-update.yml.j2 @@ -0,0 +1,23 @@ +--- +apiVersion: tekton.dev/v1beta1 +kind: Task +metadata: + name: mas-post-update +spec: + params: + {{ lookup('template', task_src_dir ~ '/common/cli-params.yml.j2') | indent(4) }} + - name: mas_instance_id + type: string + description: Instance ID + + steps: + - name: post-update-verify + image: quay.io/ibmmas/cli:latest + imagePullPolicy: $(params.image_pull_policy) + command: + - ansible-playbook + - /masfvt/fvt-post-update.yml + env: + {{ lookup('template', task_src_dir ~ '/common/cli-env.yml.j2') | indent(8) }} + - name: MAS_INSTANCE_ID + value: $(params.mas_instance_id) diff --git a/tekton/src/tasks/fvt/fvt-pre-update.yml.j2 b/tekton/src/tasks/fvt/fvt-pre-update.yml.j2 new file mode 100644 index 00000000000..164626a537f --- /dev/null +++ b/tekton/src/tasks/fvt/fvt-pre-update.yml.j2 @@ -0,0 +1,23 @@ +--- +apiVersion: tekton.dev/v1beta1 +kind: Task +metadata: + name: mas-pre-update +spec: + params: + {{ lookup('template', task_src_dir ~ '/common/cli-params.yml.j2') | indent(4) }} + - name: mas_instance_id + type: string + description: Instance ID + + steps: + - name: create-offline-upgrade-cron + image: quay.io/ibmmas/cli:latest + imagePullPolicy: $(params.image_pull_policy) + command: + - ansible-playbook + - /masfvt/fvt-pre-update.yml + env: + {{ lookup('template', task_src_dir ~ '/common/cli-env.yml.j2') | indent(8) }} + - name: MAS_INSTANCE_ID + value: $(params.mas_instance_id) diff --git a/tekton/src/tasks/fvt/fvt-pre-upgrade.yml.j2 b/tekton/src/tasks/fvt/fvt-pre-upgrade.yml.j2 new file mode 100644 index 
00000000000..fe3c3dc7849 --- /dev/null +++ b/tekton/src/tasks/fvt/fvt-pre-upgrade.yml.j2 @@ -0,0 +1,23 @@ +--- +apiVersion: tekton.dev/v1beta1 +kind: Task +metadata: + name: mas-pre-upgrade +spec: + params: + {{ lookup('template', task_src_dir ~ '/common/cli-params.yml.j2') | indent(4) }} + - name: mas_instance_id + type: string + description: Instance ID + + steps: + - name: create-offline-upgrade-cron + image: quay.io/ibmmas/cli:latest + imagePullPolicy: $(params.image_pull_policy) + command: + - ansible-playbook + - /masfvt/fvt-pre-upgrade.yml + env: + {{ lookup('template', task_src_dir ~ '/common/cli-env.yml.j2') | indent(8) }} + - name: MAS_INSTANCE_ID + value: $(params.mas_instance_id) diff --git a/tekton/src/tasks/suite-app-config.yml.j2 b/tekton/src/tasks/suite-app-config.yml.j2 index 288405b8ab9..48276794191 100644 --- a/tekton/src/tasks/suite-app-config.yml.j2 +++ b/tekton/src/tasks/suite-app-config.yml.j2 @@ -185,6 +185,10 @@ spec: type: string description: COS Bucket optionally used to hold attachments in Manage default: "" + - name: mas_appws_upgrade_type + type: string + description: Manage upgrade type + default: "" # Application Workspace - Operand (Predict) - name: predict_deployment_size @@ -363,6 +367,8 @@ spec: value: $(params.cos_instance_name) - name: COS_BUCKET_NAME value: $(params.cos_bucket_name) + - name: MAS_APPWS_UPGRADE_TYPE + value: $(params.mas_appws_upgrade_type) # Application Workspace - Operand (Predict) - name: PREDICT_DEPLOYMENT_SIZE