
Refactor update_uptime_kuma.yml: enhance compose file discovery and health check logic
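
Compose file discovery no longer assumes <working_dir>/docker-compose.yml from the container's compose labels. The play now derives the working directory and its parent, searches both for the first matching candidate (docker-compose.yml/.yaml, compose.yml/.yaml, stack.yml/.yaml), and falls back to `docker compose --project-name ... --project-directory ...` for pull/up when no compose file is found. An optional wait for the container to report HEALTH=healthy (wait_for_health, health_timeout_secs, health_poll_interval) was added before the controller-side HTTP 200 check.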

fencl
2025-10-05 12:57:26 +02:00
parent bd25ea0eb1
commit f077a811da


@@ -21,6 +21,20 @@
# ---- Docker CLI prefix (consistent with your style) ----
docker_prefix: "unalias docker 2>/dev/null || true; DOCKER_CLI_HINTS=0; command docker"
# Candidate compose filenames we will try in working_dir and its parent
compose_candidates:
- docker-compose.yml
- docker-compose.yaml
- compose.yml
- compose.yaml
- stack.yml
- stack.yaml
# Whether to wait for Docker HEALTH=healthy before HTTP check
wait_for_health: true
health_timeout_secs: 120
health_poll_interval: 3
tasks:
- name: Ensure sshpass is installed (for password-based SSH) # English comments
ansible.builtin.apt:
@@ -54,16 +68,12 @@
kuma_labels: "{{ kuma_labels_raw.stdout | from_json }}"
failed_when: false
- name: Derive compose parameters (project, service, working_dir, compose_file)
- name: Derive compose parameters (project, service, working_dir)
ansible.builtin.set_fact:
kuma_project: "{{ kuma_labels['com.docker.compose.project'] | default('kuma') }}"
kuma_service: "{{ kuma_labels['com.docker.compose.service'] | default('uptime-kuma') }}"
kuma_workdir: "{{ kuma_labels['com.docker.compose.project.working_dir'] | default('') }}"
kuma_compose_file: >-
{{
(kuma_labels['com.docker.compose.project.working_dir'] | default('') ~ '/docker-compose.yml')
if (kuma_labels['com.docker.compose.project.working_dir'] | default('')) != '' else omit
}}
kuma_parentdir: "{{ (kuma_labels['com.docker.compose.project.working_dir'] | default('') | dirname) if (kuma_labels['com.docker.compose.project.working_dir'] | default('')) != '' else '' }}"
when: kuma_labels is defined
failed_when: false
@@ -74,35 +84,44 @@
project={{ kuma_project | default('n/a') }}
service={{ kuma_service | default('n/a') }}
working_dir={{ kuma_workdir | default('n/a') }}
compose_file={{ kuma_compose_file | default('n/a') }}
parent_dir={{ kuma_parentdir | default('n/a') }}
# --- Verify compose file existence on the VM ---
- name: Check that compose file exists on VM
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- "test -f '{{ kuma_compose_file }}' && echo OK || echo MISSING"
register: kuma_compose_check
# --- Try to locate a compose file among candidates (in working_dir and parent) ---
- name: Locate compose file on VM (first match wins)
ansible.builtin.shell: |
set -euo pipefail
wd='{{ kuma_workdir }}'
pd='{{ kuma_parentdir }}'
found=''
for dir in "$wd" "$pd"; do
[ -n "$dir" ] || continue
for f in {% for c in compose_candidates %}"{{ c }}"{% if not loop.last %} {% endif %}{% endfor %}; do
if [ -f "$dir/$f" ]; then
found="$dir/$f"
break 2
fi
done
done
if [ -n "$found" ]; then
printf '%s\n' "$found"
fi
args:
executable: /bin/bash
register: kuma_compose_guess
changed_when: false
when: kuma_compose_file is defined
delegate_to: proxmox
- name: Fail early if compose file is missing
ansible.builtin.fail:
msg: >-
Compose file not found on VM: {{ kuma_compose_file | default('?') }}.
Discovered working_dir={{ kuma_workdir | default('?') }}, project={{ kuma_project | default('?') }}, service={{ kuma_service | default('?') }}.
The container seems to be managed by Portainer; expected path like /data/compose/<stack_id>/<version>/docker-compose.yml.
when: kuma_compose_file is not defined or (kuma_compose_check.stdout | default('MISSING')) != "OK"
- name: Set compose_file fact if found
ansible.builtin.set_fact:
kuma_compose_file: "{{ kuma_compose_guess.stdout | trim }}"
when: (kuma_compose_guess.stdout | default('') | trim) != ''
- name: Debug | Compose file resolution
ansible.builtin.debug:
msg: |
Compose resolution:
chosen_file={{ kuma_compose_file | default('NONE') }}
will_fallback_with_project_directory={{ kuma_compose_file is not defined }}
# --- Pull latest image first (generic pull) ---
- name: Pull image louislam/uptime-kuma:latest
@@ -123,8 +142,9 @@
register: kuma_pull
changed_when: false
# --- Compose pull/up for the discovered service only ---
- name: docker compose pull {{ kuma_service }}
# --- Compose pull (prefer -f if we have a file, else use --project-directory) ---
- name: docker compose pull {{ kuma_service }} (with -f)
when: kuma_compose_file is defined
ansible.builtin.command:
argv:
- sshpass
@@ -144,7 +164,31 @@
register: kuma_comp_pull
changed_when: false
- name: docker compose up --no-deps --force-recreate {{ kuma_service }}
- name: docker compose pull {{ kuma_service }} (fallback --project-directory)
when: kuma_compose_file is not defined
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- >
{{ ('sudo ' if use_sudo else '') + docker_prefix }}
compose --project-name {{ kuma_project }} --project-directory '{{ kuma_workdir }}'
pull {{ kuma_service }} >/dev/null
register: kuma_comp_pull
changed_when: false
# --- Compose up (prefer -f, else --project-directory) ---
- name: docker compose up --no-deps --force-recreate {{ kuma_service }} (with -f)
when: kuma_compose_file is defined
ansible.builtin.command:
argv:
- sshpass
@@ -165,6 +209,28 @@
register: kuma_comp_up
changed_when: false
- name: docker compose up --no-deps --force-recreate {{ kuma_service }} (fallback --project-directory)
when: kuma_compose_file is not defined
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- >
{{ ('sudo ' if use_sudo else '') + docker_prefix }}
compose --project-name {{ kuma_project }} --project-directory '{{ kuma_workdir }}'
up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null
register: kuma_comp_up
changed_when: false
- name: Show outputs of compose pull/up
ansible.builtin.debug:
msg: |
@@ -179,6 +245,45 @@
fail_msg: "docker compose pull/up failed (see previous stderr)."
success_msg: "Uptime Kuma updated and recreated successfully."
# ---- Optional: wait for Docker HEALTH=healthy before HTTP check ----
- name: Wait for container HEALTH=healthy (optional)
when: wait_for_health
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- |
set -euo pipefail
svc="{{ kuma_service }}"
# Grab the actual container name for this service in the project
cname=$({{ docker_prefix }} ps --format '{{"{{"}}.Names{{"}}"}}' \
| awk '/{{ kuma_project }}.*{{ kuma_service }}/ {print; exit}')
end=$(( $(date +%s) + {{ health_timeout_secs }} ))
while :; do
status=$({{ docker_prefix }} inspect -f '{{"{{"}}.State.Health.Status{{"}}"}}' "$cname" 2>/dev/null || echo "unknown")
if [ "$status" = "healthy" ]; then
echo "healthy"
exit 0
fi
if [ $(date +%s) -ge $end ]; then
echo "timeout:$status"
exit 1
fi
sleep {{ health_poll_interval }}
done
register: kuma_health
changed_when: false
failed_when: "kuma_health.rc != 0"
# ---- Health check from the controller: wait for 200 on the public URL ----
- name: Uptime Kuma | Wait for web to return 200 (controller first)
ansible.builtin.uri: