Commit e6d8ff08, authored 9 months ago by Krish Moodbidri

Update file .gitlab-ci.yml

parent 2818a776

Related merge requests: !98 Feat manual trigger pipeline jobs, !97 Add manual trigger conditions to GitLab CI/CD pipeline jobs

Pipeline #11568 failed

Showing 1 changed file: .gitlab-ci.yml, 37 additions and 363 deletions
@@ -2,34 +2,19 @@ default:
  image: $CI_REGISTRY_IMAGE:latest

variables:
  CAMPUS_IP: 138.26.48.47
  CHEAHA_IP: 172.20.10.9
  TEST_IP: 138.26.49.134
  ANSIBLE_REMOTE_TMP: "/tmp"
  AWS_DEFAULT_REGION: "bhm"
  AWS_HOST: "s3.lts.rc.uab.edu"
  FF_SCRIPT_SECTIONS: "true"
  OS_AUTH_TYPE: "v3applicationcredential"
  OS_AUTH_URL: "https://keystone.cloud.rc.uab.edu:5000/v3"
  OS_IDENTITY_API_VERSION: "3"
  OS_INTERFACE: "public"
  OS_REGION_NAME: "bhm1"
  OOD_INSTANCE_NETWORK: "knightly-network"
  PKR_VAR_flavor: "m1.medium-ruffner"
  PKR_VAR_source_image: "CentOS-7-x86_64-GenericCloud-2009"
  PKR_VAR_floating_ip_network: "uab-campus"
  PKR_VAR_security_groups: '["allow-ssh"]'
  PKR_VAR_skip_create_image: "false"
  PKR_VAR_ssh_username: "centos"
  PKR_VAR_networks: '["8cf2f12e-905d-46d9-bc70-b0897c65f75a"]'
  PKR_VAR_image_membership: '["cf6fa1e53d4c40a49f4e0e469c440359"]'
  GIT_AUTHOR_NAME: "Gitlab runner"
  GIT_AUTHOR_EMAIL: "gitlab@runner"
  NUM_SERVER_TO_KEEP: 1
  NUM_IMAGE_TO_KEEP: 30
  TIMESTAMP_REGEXP: '[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{6}'
  PKR_VAR_root_ssh_key: "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBAFqqWgmYpEaGtHBeTu27ntVJpYjwq/x5aBefrvfhk8Z9lE3cuZ26vJ9n/9tGE4Zn2Pew1mpZgi6PzfJ3vMt8yA= root@master"
  DEV_KEY: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCpncAcYosVHt7HsUcE2XOYDuCi4HQnmFJv279LOcpZgXtZ6o0BM1fe5FgJS0X1ohBXQUFRuYJuJSW/GSmC1K8T+wCrKjZLJdMbqrubHV27diUZfdoVkoJy1vcAQF5nEcoTC7MpAFbBomdn2rsrpgQe8DGiURV7+soqybXV1OsIR3FFf6npnUaskHYT/oVtG9eBOnscyBxoVgbxzlmyoBLXED/sHKFw4nQSF/glYKEFiDu6TRTsBBEGvv23Qo/66QpQiFJ6TNfApNiyY9L1X+Dy8EWU6lozmNgwGDjXQ70Lr6xHnA0QGVALJlHXa6QjpgtpC5Nefsdvtf1hpfFo2VutpbSB+aq9jk3gWNN+XkhrWN5PiwP7YYJNw/WozyfL+IhwjfHZGxkuws+wGR6ZKxlX9W9Vrsq9ncYNKuhy2SdsR6s2XECQtrEQ6ZlX5jRt6Yh5M9ls5fMsWEqknDPmr1Ui6wV7NxprYngo9fLSdYO/ETIO3S6PB0aEHOZOyGitGaM06EmNpvjQn/QkkaVgt/O8wKL1o1AVzXhDMAFvtG6ejppV6kuTUHXFgSGZF6N9fnP91HuytyzC09F+NMWcmnRdrgXlHapjuuL3zzi+XLCQvk8+aYTzBKx1nU2FPMDRZ9sInGmqdTuM002E7qVbaCy4OxcWaAS/L2UVhGnHr+egYw== louistw@uab.edu"
  DOCKER_DRIVER: overlay2
  BUILD_DATE: $CI_COMMIT_TIMESTAMP
  BASE_BUILD_FLAVOR: "standard"
  COMPUTE_BUILD_FLAVOR: "compute.large"
  GPU_BUILD_FLAVOR: "gpu.medium"
  OOD_BUILD_FLAVOR: "ood.standard"
  PKR_VAR_flavor: "standard"
  BUILT_BASE_IMAGE_ID: ""
  ANSIBLE_VAR_TOKEN: $ANSIBLE_PRIVATE_TOKEN
  AWS_ACCESS_KEY_ID: $AWS_ACCESS_KEY
  AWS_SECRET_ACCESS_KEY: $AWS_SECRET_KEY
  SELF_REG_APP_KEY: $SELF_REG_KEY
  SSH_PUB_KEY: $CI_SSH_PUB_KEY

stages:
  - pre-build
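For reference, Packer reads any PKR_VAR_<name> environment variable as the value of the HCL input variable <name>, which is why the flavor, network, and image settings above are plain CI/CD variables rather than command-line flags. A minimal sketch of how a build job can override one of them at run time; the job name and log file are illustrative, not part of this pipeline:

  packer_flavor_demo:            # hypothetical job, for illustration only
    stage: build
    script:
      # PKR_VAR_flavor from the variables block becomes var.flavor inside the template
      - export PKR_VAR_flavor="${OOD_BUILD_FLAVOR:-$PKR_VAR_flavor}"
      - packer init openstack-ood
      - packer validate openstack-ood
      - packer build -machine-readable openstack-ood | tee demo_build.log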
@@ -44,39 +29,18 @@ workflow:
    - if: $CI_PIPELINE_SOURCE == 'schedule'

.get_build_date: &get_build_date
  - export BUILD_DATE=$(TZ=America/Chicago date +%Y-%m-%dT%H%M%S)
  - echo BUILD_DATE=${BUILD_DATE}
  script:
    - export BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
    - echo "Build Date: $BUILD_DATE"

.update_ansible_repo: &update_ansible_repo
  - *get_build_date
  - |
    if [ ! -d $CI_PROJECT_DIR/CRI_XCBC ]; then
      git clone https://github.com/uabrc/CRI_XCBC.git
      cd CRI_XCBC
      git remote add upstream https://github.com/jprorama/CRI_XCBC.git
      cd ..
    fi
  - cd CRI_XCBC
  - git config user.name "${GIT_AUTHOR_NAME}"
  - git config user.email "${GIT_AUTHOR_EMAIL}"
  - git fetch origin uab-prod
  - git fetch upstream dev
  - git checkout uab-prod
  - git merge origin/uab-prod
  - git checkout -b integration
  - git merge upstream/dev
  - export CRI_XCBC_HEAD=$(git rev-parse --short HEAD)
  - export CRI_XCBC_dev=$(git rev-parse --short upstream/dev)
  - export CRI_XCBC_prod=$(git rev-parse --short origin/uab-prod)
  - cd ..
  - export PACKER_IMAGE_HEAD=$(git rev-parse --short HEAD)
  - echo CRI_XCBC_HEAD=${CRI_XCBC_HEAD} | tee -a $CI_PROJECT_DIR/image.env
  - echo CRI_XCBC_dev=${CRI_XCBC_dev} | tee -a $CI_PROJECT_DIR/image.env
  - echo CRI_XCBC_prod=${CRI_XCBC_prod} | tee -a $CI_PROJECT_DIR/image.env
  - echo PACKER_IMAGE_HEAD=${PACKER_IMAGE_HEAD} | tee -a $CI_PROJECT_DIR/image.env
  script:
    - git clone https://gitlab.com/my_ansible_repo.git ansible

.get_ansible_files: &get_ansible_files
  - s3cmd get --force -r --host=$AWS_HOST --host-bucket=$AWS_HOST s3://cheaha-cloud-ansible-files/ ansible/files/
  script:
    - cp ansible/inventory/production .
    - cp -R ansible/playbooks .

build_docker_image:
  image: docker:20.10.17
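The dotted keys above (.get_build_date, .update_ansible_repo, .get_ansible_files) are hidden jobs used only as YAML anchors: build jobs splice their command lists into a script section with aliases, and GitLab flattens the nested list. A minimal sketch of that consumption pattern, with an illustrative anchor and job name not taken from this pipeline:

  .say_hello: &say_hello
    - echo "hello from the anchor"

  anchor_demo_job:               # hypothetical consumer job
    stage: build
    script:
      - *say_hello               # expands to the anchored command list
      - echo "rest of the job"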
@@ -99,10 +63,9 @@ build_docker_image:
      terraform --version'
    - docker push --all-tags $CI_REGISTRY_IMAGE
  rules:
    - if: $CI_COMMIT_REF_NAME == "main"
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - Dockerfile
      allow_failure: true
      when: manual  # Manual trigger for the build

build_base_image:
  stage: build
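The rules pattern repeated throughout this diff (and the subject of MR !97) runs a job automatically on main while exposing it as a manual job on merge request pipelines. A standalone sketch of that pattern; the job name and script line are illustrative:

  manual_trigger_demo:           # illustrative job name
    stage: build
    script:
      - echo "build something"
    rules:
      - if: $CI_COMMIT_REF_NAME == "main"                  # runs automatically on main
      - if: $CI_PIPELINE_SOURCE == "merge_request_event"   # manual play button on MR pipelines
        when: manual
        allow_failure: true      # keeps the MR pipeline from blocking while the job waits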
@@ -135,6 +98,10 @@ build_base_image:
    reports:
      dotenv: image.env
    expire_in: 30 days
  rules:
    - if: $CI_COMMIT_REF_NAME == "main"
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual  # Manual trigger for the build

build_compute_image:
  stage: build
@@ -158,6 +125,10 @@ build_compute_image:
    - packer init openstack-compute
    - packer validate openstack-compute
    - packer build -machine-readable openstack-compute | tee compute_build.log
  rules:
    - if: $CI_COMMIT_REF_NAME == "main"
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual  # Manual trigger for the build

build_gpu_image:
  stage: build
@@ -192,9 +163,9 @@ build_gpu_image:
        exit 1
      fi
  rules:
    - if: $SKIP_GPU_BUILD == "true"
      when: never
    - when: always
    - if: $CI_COMMIT_REF_NAME == "main"
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual  # Manual trigger for the build

build_ood_image:
  stage: build
@@ -209,8 +180,6 @@ build_ood_image:
      -o CRI_XCBC/group_vars/knightly
    - 'sed -i -E "s/(lts_access_key: ).*/\1\"${AWS_ACCESS_KEY_ID}\"/" CRI_XCBC/group_vars/knightly'
    - 'sed -i -E "s/(lts_secret_key: ).*/\1\"${AWS_SECRET_ACCESS_KEY}\"/" CRI_XCBC/group_vars/knightly'
    - 'sed -i -E "s/(user_register_app_key: ).*/\1\"${SELF_REG_APP_KEY}\"/" CRI_XCBC/group_vars/knightly'
    - 'sed -i -E "s/(celery_user_password: ).*/\1\"${CELERY_PASSWD}\"/" CRI_XCBC/group_vars/knightly'
    - 'sed -i -E "s|(ssh_pub_key: ).*|\1\"{{ lookup(''file'', ''${SSH_PUB_KEY}'') }}\"|" CRI_XCBC/group_vars/knightly'
    - export PKR_VAR_flavor="${OOD_BUILD_FLAVOR:-$PKR_VAR_flavor}"
    - packer init openstack-ood
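Each sed line above rewrites one key in CRI_XCBC/group_vars/knightly in place: the captured group \1 keeps the key name and everything after it is replaced with the secret from CI/CD variables. A hedged shell illustration against a scratch file; the file path and example value are made up:

  # illustration only: capture-group substitution on a throwaway file
  echo 'lts_access_key: "CHANGEME"' > /tmp/group_vars_demo
  AWS_ACCESS_KEY_ID=EXAMPLEKEY
  sed -i -E "s/(lts_access_key: ).*/\1\"${AWS_ACCESS_KEY_ID}\"/" /tmp/group_vars_demo
  cat /tmp/group_vars_demo     # -> lts_access_key: "EXAMPLEKEY"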
@@ -225,303 +194,8 @@ build_ood_image:
        sed -i -E "s/(ood_servername: ).*/\1\"$CI_COMMIT_REF_SLUG.$FLOATING_IP.nip.io\"/" CRI_XCBC/group_vars/knightly
      elif [ $CI_PIPELINE_SOURCE == 'schedule' ]; then
        export PKR_VAR_image_name="ood-${BUILD_DATE}"
        echo INSTANCE_FLAVOR="${OOD_INSTANCE_FLAVOR:-cpu16-64g}" | tee -a $CI_PROJECT_DIR/image.env
        echo OOD_INSTANCE_NAME="ood-knightly" | tee -a $CI_PROJECT_DIR/image.env
        echo FLOATING_IP=$TEST_IP | tee -a $CI_PROJECT_DIR/image.env
      fi
    - >
      PKR_VAR_build_instance_name="ood-${CRI_XCBC_HEAD}"
      PKR_VAR_image_date_suffix=false
      packer build -machine-readable openstack-ood | tee ood_build.log
    - export BUILT_OOD_IMAGE_ID=$(grep 'Image:' ood_build.log | awk '{print $4}')
    - echo BUILT_OOD_IMAGE_ID=${BUILT_OOD_IMAGE_ID} | tee -a $CI_PROJECT_DIR/image.env
    - openstack image set --property CRI_XCBC_prod=${CRI_XCBC_prod} --property CRI_XCBC_dev=${CRI_XCBC_dev} --property PACKER_IMAGE_HEAD=${PACKER_IMAGE_HEAD} ${BUILT_OOD_IMAGE_ID}
  artifacts:
    reports:
      dotenv: image.env
test_ood_image:
  stage: test
  needs: [build_ood_image]
  environment:
    name: knightly
  tags:
    - build
  script:
    - openstack image set --accept $BUILT_OOD_IMAGE_ID
    - FAILED=false
    - |
      eval $(ssh-agent -s)
      chmod 400 "$SSH_PRIV_KEY"
      ssh-add "$SSH_PRIV_KEY"
      mkdir ~/.ssh
      chmod 700 ~/.ssh
    - OLD_INSTANCE_IP=$(openstack floating ip list --floating-ip-address $CHEAHA_IP -c "Fixed IP Address" -f value)
    - echo $OLD_INSTANCE_IP
    - |
      if [ ! -z $OLD_INSTANCE_IP ]; then
        export OLD_INSTANCE_ID=$(openstack server list --name $OOD_INSTANCE_NAME --ip $OLD_INSTANCE_IP -c ID -f value)
      fi
    - echo OLD_INSTANCE_ID=$OLD_INSTANCE_ID | tee -a instance.env
    - |
      cat > user_data.txt << OEOF
      #!/bin/bash
      echo "Starting user_data: \$(date)"
      cat > /etc/resolv.conf << EOF
      search openstack.internal cm.cluster rc.uab.edu ib.cluster drac.cluster eth.cluster ib-hdr.cluster
      nameserver 172.20.0.25
      EOF
      echo "$DEV_KEY" >> /root/.ssh/authorized_keys
      mkdir -p /run/shibboleth
      chown shibd:shibd /run/shibboleth
      echo "Installing s3cmd: \$(date)"
      pip3 install s3cmd
      echo "Downloading hostkey via s3cmd: \$(date)"
      s3cmd get --force -r --access_key=$AWS_ACCESS_KEY_ID --secret_key=$AWS_SECRET_ACCESS_KEY --host=$AWS_HOST --host-bucket=$AWS_HOST s3://knightly-key/ /etc/ssh/
      echo "Download completed: \$(date)"
      OEOF
    - >
      export NEW_INSTANCE_ID=$(openstack server create
      -c id -f value --image $BUILT_OOD_IMAGE_ID
      --network $OOD_INSTANCE_NETWORK
      --security-group ood-https-ports
      --security-group node-exporter
      --security-group allow-ssh
      --user-data user_data.txt
      --flavor $INSTANCE_FLAVOR
      --wait
      $OOD_INSTANCE_NAME)
    - echo NEW_INSTANCE_ID=$NEW_INSTANCE_ID | tee -a instance.env
    - openstack server add floating ip $NEW_INSTANCE_ID $FLOATING_IP
    - >
      curl --retry 10 --retry-delay 20 --retry-connrefused https://knightly.rc.uab.edu/Shibboleth.sso/Metadata --resolve knightly.rc.uab.edu:443:$FLOATING_IP -kf
      || FAILED=true
    - |
      cp "$SSH_KNOWN_HOSTS" ~/.ssh/known_hosts
      chmod 644 ~/.ssh/known_hosts
      until ssh acctsvc@$FLOATING_IP hostname; do sleep 5; done
      ssh acctsvc@$FLOATING_IP '[ $(mount | grep "etc/auto" | wc -l) -eq 6 ]' || FAILED=true
    - |
      if [ "$FAILED" = true ]; then
        if [ "${DELETE_WHEN_FAILED-true}" = true ]; then
          openstack server delete $NEW_INSTANCE_ID
          echo "DELETE_BUILT_IMAGE=true" | tee -a instance.env
        fi
        false
      fi
    - openstack server remove floating ip $NEW_INSTANCE_ID $FLOATING_IP
  artifacts:
    reports:
      dotenv: instance.env
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
      when: always
test_ood_image_mr:
  stage: test
  needs: [build_ood_image]
  tags:
    - build
  script:
    - export OOD_INSTANCE_NETWORK="cicd-net"
    - FAILED=false
    - |
      eval $(ssh-agent -s)
      chmod 400 "$SSH_PRIV_KEY"
      ssh-add "$SSH_PRIV_KEY"
      mkdir ~/.ssh
      chmod 700 ~/.ssh
    - |
      cat > user_data.txt << OEOF
      #!/bin/bash
      cat > /etc/resolv.conf << EOF
      search openstack.internal cm.cluster rc.uab.edu ib.cluster drac.cluster eth.cluster ib-hdr.cluster
      nameserver 172.20.0.25
      EOF
      echo "$DEV_KEY" >> /root/.ssh/authorized_keys
      mkdir -p /run/shibboleth
      chown shibd:shibd /run/shibboleth
      OEOF
    - >
      export NEW_INSTANCE_ID=$(openstack server create
      -c id -f value --image $BUILT_OOD_IMAGE_ID
      --network $OOD_INSTANCE_NETWORK
      --security-group ood-https-ports
      --security-group allow-ssh
      --user-data user_data.txt
      --flavor $INSTANCE_FLAVOR
      --wait
      $OOD_INSTANCE_NAME)
    - echo NEW_INSTANCE_ID=$NEW_INSTANCE_ID | tee -a instance.env
    - openstack server add floating ip $NEW_INSTANCE_ID $FLOATING_IP
    - >
      curl --retry 10 --retry-delay 20 --retry-connrefused https://knightly.rc.uab.edu/Shibboleth.sso/Metadata --resolve knightly.rc.uab.edu:443:$FLOATING_IP -kf
      || FAILED=true
    - ssh -o StrictHostKeyChecking=no acctsvc@$FLOATING_IP '[ $(mount | grep "etc/auto" | wc -l) -eq 6 ]' || FAILED=true
    - |
      if [ "$FAILED" = true ]; then
        if [ "${DELETE_WHEN_FAILED-true}" = true ]; then
          openstack server delete $NEW_INSTANCE_ID
          openstack image delete $BUILT_OOD_IMAGE_ID
        fi
        false
      fi
  artifacts:
    reports:
      dotenv: instance.env
  rules:
    - if: $CI_MERGE_REQUEST_ID
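Both test jobs share the same smoke test: curl --resolve pins knightly.rc.uab.edu to the freshly attached floating IP, so the request exercises the new instance's Shibboleth metadata endpoint instead of whatever DNS currently points at. A hedged shell illustration; the IP below is a documentation placeholder:

  # illustration: force the hostname to resolve to one specific address for this request
  FLOATING_IP=203.0.113.10     # placeholder address, not a real deployment value
  curl --retry 10 --retry-delay 20 --retry-connrefused -kf \
    --resolve knightly.rc.uab.edu:443:$FLOATING_IP \
    https://knightly.rc.uab.edu/Shibboleth.sso/Metadata || echo "smoke test failed"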
deploy_review:
  stage: deploy
  script:
    - echo "Deploy Review App"
  environment:
    name: review/$CI_COMMIT_REF_SLUG
    url: https://$CI_COMMIT_REF_SLUG.$FLOATING_IP.nip.io
    on_stop: stop_review
    auto_stop_in: 2 days
  tags:
    - build
  rules:
    - if: $CI_MERGE_REQUEST_ID

stop_review:
  stage: deploy
  script:
    - openstack server delete $NEW_INSTANCE_ID
    - openstack image delete $BUILT_OOD_IMAGE_ID
    - openstack floating ip delete $FLOATING_IP
  environment:
    name: review/$CI_COMMIT_REF_SLUG
    action: stop
  tags:
    - build
  rules:
    - if: $CI_MERGE_REQUEST_ID
      when: manual
deploy_knightly:
  stage: deploy
  environment:
    name: knightly
  tags:
    - build
  script:
    - |
      if [ ! -z $OLD_INSTANCE_ID ]; then
        openstack server remove floating ip $OLD_INSTANCE_ID $CAMPUS_IP
        openstack server remove floating ip $OLD_INSTANCE_ID $CHEAHA_IP
      fi
    - |
      if [ ! -z $NEW_INSTANCE_ID ]; then
        openstack server add floating ip $NEW_INSTANCE_ID $CAMPUS_IP
        openstack server add floating ip $NEW_INSTANCE_ID $CHEAHA_IP
      fi
  only:
    - schedules

deploy_cheaha:
  stage: deploy
  environment:
    name: cheaha
  tags:
    - build
  script:
    - echo "Job placeholder to deploy to Cheaha"
  when: manual
  only:
    - main
cleanup_knightly:
  stage: cleanup
  environment:
    name: knightly
  tags:
    - build
  script:
    - >
      SERVER_TO_BE_DELETE=($(openstack server list --name $OOD_INSTANCE_NAME --sort-column Image --sort-descending -f value -c ID
      | awk -v NSTK=$NUM_SERVER_TO_KEEP -v OID=$OLD_INSTANCE_ID '$0 != OID {count++}
      $0 != OID && count>NSTK {print}'))
    - |
      for svr in ${SERVER_TO_BE_DELETE[@]}; do
        echo "Deleting server $svr"
        openstack server delete ${svr}
      done
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
      when: always

cleanup_integration:
  stage: cleanup
  tags:
    - build
  script:
    - OS_PROJECT_ID=$(openstack application credential show $OS_APPLICATION_CREDENTIAL_ID -f value -c project_id)
    - openstack image list --sort-column Name --sort-descending -f value -c Name -c ID --property owner=$OS_PROJECT_ID > images.txt
    - |
      if [ "${DELETE_BUILT_IMAGE-false}" = true ]; then
        openstack image delete $BUILT_OOD_IMAGE_ID
      fi
    - >
      OOD_IMAGE_TO_BE_DELETE=($(cat images.txt
      | awk -v NITK=$NUM_IMAGE_TO_KEEP -v REGEX=ood-$TIMESTAMP_REGEX
      '{if ($0 ~ REGEX) result[count++] = $1}
      END {for(i=NITK;i<count;i++) print result[i]}'))
    - >
      BASE_IMAGE_TO_BE_DELETE=($(cat images.txt
      | awk -v NITK=$NUM_IMAGE_TO_KEEP -v REGEX=base-$TIMESTAMP_REGEX
      '{if ($0 ~ REGEX) result[count++] = $1}
      END {for(i=NITK;i<count;i++) print result[i]}'))
    - >
      COMPUTE_IMAGE_TO_BE_DELETE=($(cat images.txt
      | awk -v NITK=$NUM_IMAGE_TO_KEEP -v REGEX=compute-$TIMESTAMP_REGEX
      '{if ($0 ~ REGEX) result[count++] = $1}
      END {for(i=NITK;i<count;i++) print result[i]}'))
    - >
      GPU_IMAGE_TO_BE_DELETE=($(cat images.txt
      | awk -v NITK=$NUM_IMAGE_TO_KEEP -v REGEX=gpu-$TIMESTAMP_REGEX
      '{if ($0 ~ REGEX) result[count++] = $1}
      END {for(i=NITK;i<count;i++) print result[i]}'))
    - |
      for img in ${OOD_IMAGE_TO_BE_DELETE[@]}; do
        echo "Deleting image $img"
        openstack image delete ${img}
      done
    - |
      for img in ${BASE_IMAGE_TO_BE_DELETE[@]}; do
        echo "Deleting image $img"
        openstack image delete ${img}
      done
    - |
      for img in ${COMPUTE_IMAGE_TO_BE_DELETE[@]}; do
        echo "Deleting image $img"
        openstack image delete ${img}
      done
    - |
      for img in ${GPU_IMAGE_TO_BE_DELETE[@]}; do
        echo "Deleting image $img"
        openstack image delete ${img}
      done
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
      when: always
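The awk one-liners in cleanup_integration implement the image retention policy: collect every image whose name matches the prefix plus timestamp pattern, then print only the entries beyond the first NUM_IMAGE_TO_KEEP (the list is already sorted newest first). A minimal stand-alone illustration with fabricated image names, keeping the two newest:

  # illustration: keep the 2 newest "ood-" images, print the rest for deletion
  printf 'ood-2024-05-03T120000\nood-2024-05-02T120000\nood-2024-05-01T120000\n' |
    awk -v NITK=2 -v REGEX='ood-[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{6}' \
      '{if ($0 ~ REGEX) result[count++] = $0}
       END {for (i = NITK; i < count; i++) print result[i]}'
  # -> ood-2024-05-01T120000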
cleanup_mr:
  stage: cleanup
  tags:
    - build
  script:
    - OS_PROJECT_ID=$(openstack application credential show $OS_APPLICATION_CREDENTIAL_ID -f value -c project_id)
    - >
      IMAGE_TO_BE_DELETE=($(openstack image list --sort-column Name --sort-descending -f value -c Name -c ID --property owner=$OS_PROJECT_ID
      | awk -v REGEX="(ood|base|compute|gpu)-PR-$CI_MERGE_REQUEST_IID" '{if ($0 ~ REGEX) print $1}'))
    - |
      for img in ${IMAGE_TO_BE_DELETE[@]}; do
        echo "Deleting image $img"
        openstack image delete ${img}
      done
      echo INSTANCE_FLAVOR="${OOD_INSTANCE_FLAVOR:-cpu16-64g}" | tee -a
  rules:
    - if: $CI_COMMIT_REF_NAME == "main"
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: always
      when: manual  # Manual trigger for the build