Commit d9c5d8f

sjpb and bertiethorpe authored
Delete build VMs in CI nightly cleanup (#777)
* delete build VMs in CI nightly cleanup
* name build volumes and include in nightly cleanup
* simplify cleanup of volumes and include fatimage build VMs

Co-authored-by: bertiethorpe <bertie443@gmail.com>
Co-authored-by: bertiethorpe <84867280+bertiethorpe@users.noreply.github.com>
1 parent 72aff75 commit d9c5d8f
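
The change boils down to three prefix-based calls to the existing dev/delete-cluster.py helper, replacing the previous server-listing logic. A minimal sketch of the equivalent manual run, assuming an activated venv and a clouds.yaml for the target CI cloud (the prefixes are the ones used in the workflow below):

    # Remove nightly CI leftovers by name prefix, as the updated workflow does.
    . venv/bin/activate
    ./dev/delete-cluster.py slurmci-RL --force        # CI test clusters
    ./dev/delete-cluster.py openhpc-extra-RL --force  # extra build VMs and volumes
    ./dev/delete-cluster.py openhpc-RL --force        # fatimage build VMs and volumes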

2 files changed: +16 -43 lines

.github/workflows/nightly-cleanup.yml

Lines changed: 10 additions & 43 deletions
@@ -46,53 +46,20 @@ jobs:
           echo "${{ secrets[format('{0}_CLOUDS_YAML', env.CI_CLOUD)] }}" > ~/.config/openstack/clouds.yaml
         shell: bash
 
-      - name: Find CI clusters
+      - name: Delete all CI clusters
         run: |
           . venv/bin/activate
-          CI_CLUSTERS=$(openstack server list | grep --only-matching 'slurmci-RL.-[0-9]\+' | sort | uniq || true)
-          echo "DEBUG: Raw CI clusters: $CI_CLUSTERS"
-
-          if [[ -z "$CI_CLUSTERS" ]]; then
-            echo "No matching CI clusters found."
-          else
-            # Flatten multiline value so can be passed as env var
-            CI_CLUSTERS_FORMATTED=$(echo "$CI_CLUSTERS" | tr '\n' ' ' | sed 's/ $//')
-            echo "DEBUG: Formatted CI clusters: $CI_CLUSTERS_FORMATTED"
-            echo "ci_clusters=$CI_CLUSTERS_FORMATTED" >> "$GITHUB_ENV"
-          fi
+          ./dev/delete-cluster.py slurmci-RL --force
         shell: bash
-
-      - name: Delete CI clusters
+
+      - name: Delete all CI extra build VMs and volumes
         run: |
           . venv/bin/activate
-          if [[ -z ${ci_clusters} ]]; then
-            echo "No clusters to delete."
-            exit 0
-          fi
-
-          for cluster_prefix in ${ci_clusters}
-          do
-            echo "Processing cluster: $cluster_prefix"
-
-            # Get all servers with the matching name for control node
-            CONTROL_SERVERS=$(openstack server list --name "${cluster_prefix}-control" --format json)
-
-            # Get unique server names to avoid duplicate cleanup
-            UNIQUE_NAMES=$(echo "$CONTROL_SERVERS" | jq -r '.[].Name' | sort | uniq)
-            for name in $UNIQUE_NAMES; do
-              echo "Deleting cluster with control node: $name"
-
-              # Get the first matching server ID by name
-              server=$(echo "$CONTROL_SERVERS" | jq -r '.[] | select(.Name=="'"$name"'") | .ID' | head -n1)
-
-              # Make sure server still exists (wasn't deleted earlier)
-              if ! openstack server show "$server" &>/dev/null; then
-                echo "Server $server no longer exists, skipping $name."
-                continue
-              fi
+          ./dev/delete-cluster.py openhpc-extra-RL --force
+        shell: bash
 
-              echo "Deleting cluster $cluster_prefix (server $server)..."
-              ./dev/delete-cluster.py "$cluster_prefix" --force
-            done
-          done
+      - name: Delete all fatimage build VMs and volumes
+        run: |
+          . venv/bin/activate
+          ./dev/delete-cluster.py openhpc-RL --force
         shell: bash
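
Not part of the change, but a quick way to confirm the sweep worked is to list anything still matching the CI prefixes, using the same OpenStack CLI the removed step relied on. A hedged sketch, assuming the workflow's venv and clouds.yaml are in place:

    # Hypothetical post-cleanup check: list servers and volumes still matching
    # the prefixes the workflow deletes.
    for prefix in slurmci-RL openhpc-extra-RL openhpc-RL; do
      echo "== ${prefix} =="
      openstack server list --format value -c Name | grep "^${prefix}" || true
      openstack volume list --format value -c Name | grep "^${prefix}" || true
    done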

packer/openstack.pkr.hcl

Lines changed: 6 additions & 0 deletions
@@ -128,6 +128,11 @@ variable "volume_size" {
   default = 20
 }
 
+variable "volume_name" {
+  type = string
+  default = null
+}
+
 variable "image_disk_format" {
   type = string
   default = "raw"
@@ -162,6 +167,7 @@ source "openstack" "openhpc" {
   use_blockstorage_volume = var.use_blockstorage_volume
   volume_type = var.volume_type
   volume_size = var.volume_size
+  volume_name = "${var.image_name}${local.image_name_version}"
   metadata = var.metadata
   instance_metadata = {
     ansible_init_disable = "true"
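
The added volume_name interpolation means each build volume shares a name prefix with the image it is building, which is what lets the nightly job above sweep them up with the openhpc-RL and openhpc-extra-RL prefixes. A rough illustration with hypothetical values for the two parts (the real values come from var.image_name and local.image_name_version):

    # Hypothetical values only: if the image is "openhpc-RL9" with a "-v2.3"
    # version suffix, the build volume becomes "openhpc-RL9-v2.3", so the
    # cleanup's "openhpc-RL" prefix match will pick it up.
    image_name="openhpc-RL9"
    image_name_version="-v2.3"
    volume_name="${image_name}${image_name_version}"
    echo "$volume_name"   # openhpc-RL9-v2.3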
