
Refactor update_uptime_kuma.yml: simplify variable definitions and enhance command execution logic

fencl
2025-10-05 13:07:11 +02:00
parent f077a811da
commit 3c0f29e3cb


@@ -1,6 +1,4 @@
# nextcloud/update_uptime_kuma.yml
- name: Update Uptime Kuma on VM via Proxmox (auto-discover compose path)
- name: Update Uptime Kuma on VM via Proxmox
hosts: proxmox
gather_facts: false
become: true
@@ -8,32 +6,30 @@
become_method: sudo
vars:
# ---- VM access via sshpass (same pattern as your Collabora playbook) ----
# --- Connection to VM (provided by Semaphore env vars) ---
vm_ip: "{{ lookup('env', 'VM_IP') }}"
vm_user: "{{ lookup('env', 'VM_USER') }}"
vm_pass: "{{ lookup('env', 'VM_PASS') }}"
use_sudo: false
# ---- Inputs / defaults ----
kuma_url: "https://monitor.martinfencl.eu/"
kuma_container_name: "uptime-kuma-dev" # running container name to inspect
# --- Uptime Kuma specifics ---
kuma_project: "uptime-kuma" # docker compose project name
kuma_compose_file: "/data/compose/kuma/uptime-kuma.yml" # adjust to your path
kuma_service: "uptime-kuma" # service name from compose
kuma_port: 3001
# ---- Docker CLI prefix (consistent with your style) ----
# Optional external URL for controller-side readiness check.
# If empty/undefined, controller check is skipped and we only probe from the VM.
kuma_url: "{{ lookup('env', 'KUMA_URL') | default('', true) }}"
# Docker command prefix (keeps behavior consistent and quiet)
docker_prefix: "unalias docker 2>/dev/null || true; DOCKER_CLI_HINTS=0; command docker"
# Candidate compose filenames we will try in working_dir and its parent
compose_candidates:
- docker-compose.yml
- docker-compose.yaml
- compose.yml
- compose.yaml
- stack.yml
- stack.yaml
# Whether to wait for Docker HEALTH=healthy before HTTP check
wait_for_health: true
health_timeout_secs: 120
health_poll_interval: 3
# Commands to run on the target VM
kuma_commands:
- "{{ docker_prefix }} pull -q louislam/uptime-kuma:latest >/dev/null"
- "{{ docker_prefix }} compose -p {{ kuma_project }} -f {{ kuma_compose_file }} pull {{ kuma_service }} >/dev/null"
- "{{ docker_prefix }} compose -p {{ kuma_project }} -f {{ kuma_compose_file }} up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null"
tasks:
- name: Ensure sshpass is installed (for password-based SSH)
@@ -42,8 +38,7 @@
state: present
update_cache: yes
# --- Discover compose metadata from the running container labels ---
- name: Discover compose labels from the container (project, service, working_dir)
- name: Run Uptime Kuma update commands on VM (via SSH) # Avoid leaking password in logs
ansible.builtin.command:
argv:
- sshpass
@@ -57,252 +52,60 @@
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- >
{{ docker_prefix }} inspect {{ kuma_container_name }}
--format '{{"{{"}}json .Config.Labels{{"}}"}}'
register: kuma_labels_raw
- "{{ ('sudo ' if use_sudo else '') + item }}"
loop: "{{ kuma_commands }}"
register: kuma_cmds
changed_when: false
no_log: true # <- hides password and raw argv from logs
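# Note (illustrative, not part of this commit): each loop iteration is equivalent to
# running, from the Proxmox host, roughly:
#   sshpass -p "$VM_PASS" ssh -o StrictHostKeyChecking=no -o ConnectTimeout=15 \
#     "$VM_USER@$VM_IP" bash -lc '<one kuma_commands entry>'
# no_log keeps the password and raw argv out of Ansible output; if reproducing this by
# hand, note that sshpass -p exposes the password to the local process list.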
- name: Parse compose labels JSON
ansible.builtin.set_fact:
kuma_labels: "{{ kuma_labels_raw.stdout | from_json }}"
failed_when: false
- name: Derive compose parameters (project, service, working_dir)
ansible.builtin.set_fact:
kuma_project: "{{ kuma_labels['com.docker.compose.project'] | default('kuma') }}"
kuma_service: "{{ kuma_labels['com.docker.compose.service'] | default('uptime-kuma') }}"
kuma_workdir: "{{ kuma_labels['com.docker.compose.project.working_dir'] | default('') }}"
kuma_parentdir: "{{ (kuma_labels['com.docker.compose.project.working_dir'] | default('') | dirname) if (kuma_labels['com.docker.compose.project.working_dir'] | default('')) != '' else '' }}"
when: kuma_labels is defined
failed_when: false
- name: Debug | Discovered compose info
- name: Show summarized outputs for each command (sanitized)
ansible.builtin.debug:
msg: |
Discovered:
project={{ kuma_project | default('n/a') }}
service={{ kuma_service | default('n/a') }}
working_dir={{ kuma_workdir | default('n/a') }}
parent_dir={{ kuma_parentdir | default('n/a') }}
CMD: {{ item_short }}
RC: {{ item.rc }}
STDOUT:
{{ (item.stdout | default('')).strip() }}
STDERR:
{{ (item.stderr | default('')).strip() }}
loop: "{{ kuma_cmds.results }}"
vars:
# Print only the last argv element (the remote "bash -lc" payload) so the password is not leaked
item_short: "{{ (item.cmd | map('quote') | list)[-1] }}"
when: kuma_cmds is defined
# --- Try to locate a compose file among candidates (in working_dir and parent) ---
- name: Locate compose file on VM (first match wins)
ansible.builtin.shell: |
set -euo pipefail
wd='{{ kuma_workdir }}'
pd='{{ kuma_parentdir }}'
found=''
for dir in "$wd" "$pd"; do
[ -n "$dir" ] || continue
for f in {% for c in compose_candidates %}"{{ c }}"{% if not loop.last %} {% endif %}{% endfor %}; do
if [ -f "$dir/$f" ]; then
found="$dir/$f"
break 2
fi
done
done
if [ -n "$found" ]; then
printf '%s\n' "$found"
fi
args:
executable: /bin/bash
register: kuma_compose_guess
changed_when: false
delegate_to: proxmox
- name: Set compose_file fact if found
ansible.builtin.set_fact:
kuma_compose_file: "{{ kuma_compose_guess.stdout | trim }}"
when: (kuma_compose_guess.stdout | default('') | trim) != ''
- name: Debug | Compose file resolution
ansible.builtin.debug:
msg: |
Compose resolution:
chosen_file={{ kuma_compose_file | default('NONE') }}
will_fallback_with_project_directory={{ kuma_compose_file is not defined }}
# --- Pull latest image first (generic pull) ---
- name: Pull image louislam/uptime-kuma:latest
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- "{{ ('sudo ' if use_sudo else '') + docker_prefix }} pull -q louislam/uptime-kuma:latest >/dev/null"
register: kuma_pull
changed_when: false
# --- Compose pull (prefer -f if we have a file, else use --project-directory) ---
- name: docker compose pull {{ kuma_service }} (with -f)
when: kuma_compose_file is defined
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- >
{{ ('sudo ' if use_sudo else '') + docker_prefix }}
compose -p {{ kuma_project }} -f '{{ kuma_compose_file }}' pull {{ kuma_service }} >/dev/null
register: kuma_comp_pull
changed_when: false
- name: docker compose pull {{ kuma_service }} (fallback --project-directory)
when: kuma_compose_file is not defined
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- >
{{ ('sudo ' if use_sudo else '') + docker_prefix }}
compose --project-name {{ kuma_project }} --project-directory '{{ kuma_workdir }}'
pull {{ kuma_service }} >/dev/null
register: kuma_comp_pull
changed_when: false
# --- Compose up (prefer -f, else --project-directory) ---
- name: docker compose up --no-deps --force-recreate {{ kuma_service }} (with -f)
when: kuma_compose_file is defined
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- >
{{ ('sudo ' if use_sudo else '') + docker_prefix }}
compose -p {{ kuma_project }} -f '{{ kuma_compose_file }}'
up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null
register: kuma_comp_up
changed_when: false
- name: docker compose up --no-deps --force-recreate {{ kuma_service }} (fallback --project-directory)
when: kuma_compose_file is not defined
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- >
{{ ('sudo ' if use_sudo else '') + docker_prefix }}
compose --project-name {{ kuma_project }} --project-directory '{{ kuma_workdir }}'
up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null
register: kuma_comp_up
changed_when: false
- name: Show outputs of compose pull/up
ansible.builtin.debug:
msg: |
PULL rc={{ kuma_comp_pull.rc }} stderr="{{ (kuma_comp_pull.stderr | default('')).strip() }}"
UP rc={{ kuma_comp_up.rc }} stderr="{{ (kuma_comp_up.stderr | default('')).strip() }}"
- name: Assert compose pull/up succeeded
- name: Fail play if any Uptime Kuma command failed
ansible.builtin.assert:
that:
- kuma_comp_pull.rc == 0
- kuma_comp_up.rc == 0
fail_msg: "docker compose pull/up failed (see previous stderr)."
success_msg: "Uptime Kuma updated and recreated successfully."
that: "item.rc == 0"
fail_msg: "Uptime Kuma update failed on VM: {{ (item.cmd | last) }} (rc={{ item.rc }})"
success_msg: "All Uptime Kuma update commands succeeded."
loop: "{{ kuma_cmds.results }}"
# ---- Optional: wait for Docker HEALTH=healthy before HTTP check ----
- name: Wait for container HEALTH=healthy (optional)
when: wait_for_health
ansible.builtin.command:
argv:
- sshpass
- -p
- "{{ vm_pass }}"
- ssh
- -o
- StrictHostKeyChecking=no
- -o
- ConnectTimeout=15
- "{{ vm_user }}@{{ vm_ip }}"
- bash
- -lc
- |
set -euo pipefail
svc="{{ kuma_service }}"
# Grab the actual container name for this service in the project
cname=$({{ docker_prefix }} ps --format '{{"{{"}}.Names{{"}}"}}' \
| awk '/{{ kuma_project }}.*{{ kuma_service }}/ {print; exit}')
end=$(( $(date +%s) + {{ health_timeout_secs }} ))
while :; do
status=$({{ docker_prefix }} inspect -f '{{"{{"}}.State.Health.Status{{"}}"}}' "$cname" 2>/dev/null || echo "unknown")
if [ "$status" = "healthy" ]; then
echo "healthy"
exit 0
fi
if [ $(date +%s) -ge $end ]; then
echo "timeout:$status"
exit 1
fi
sleep {{ health_poll_interval }}
done
register: kuma_health
changed_when: false
failed_when: "kuma_health.rc != 0"
# -------------------------
# Readiness checks
# -------------------------
# ---- Health check from the controller: wait for 200 on the public URL ----
- name: Uptime Kuma | Wait for web to return 200 (controller first)
- name: Kuma | Wait for web to respond (controller-side, if URL provided)
ansible.builtin.uri:
url: "{{ kuma_url }}"
url: "{{ (kuma_url | regex_replace('/$','')) + '/' }}"
method: GET
return_content: true
validate_certs: true
# Validate TLS only when using https://
validate_certs: "{{ (kuma_url | default('')) is match('^https://') }}"
status_code: 200
register: kuma_controller
delegate_to: localhost
run_once: true
retries: 15
when: kuma_url is defined and (kuma_url | length) > 0
retries: 20
delay: 2
until: kuma_controller.status == 200
until: kuma_controller.status == 200 and ('Uptime Kuma' in (kuma_controller.content | default('')))
failed_when: false
changed_when: false
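# Note (illustrative, not part of this commit): the controller-side probe above is
# roughly equivalent to running on the controller:
#   curl -fsS --max-time 10 "$KUMA_URL" | grep -q 'Uptime Kuma'
# retried up to 20 times with a 2-second delay; failed_when: false means a miss here
# does not fail the play, because the VM-side probe below acts as the fallback.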
# ---- Optional VM-side fetch (double-check from VM) ----
- name: Uptime Kuma | VM-side fetch HTML (via Python)
- name: Kuma | VM-side probe (fallback to localhost:port, no auth)
ansible.builtin.command:
argv:
- sshpass
@@ -318,40 +121,53 @@
- -lc
- |
python3 - <<'PY'
import sys, urllib.request, ssl
# Simple readiness probe: fetch "/" and look for "Uptime Kuma" in HTML
import sys, urllib.request
try:
ctx = ssl.create_default_context()
with urllib.request.urlopen("{{ kuma_url }}", timeout=15, context=ctx) as r:
sys.stdout.write(r.read().decode(errors="ignore"))
with urllib.request.urlopen("http://127.0.0.1:{{ kuma_port }}/", timeout=15) as r:
body = r.read(8192).decode(errors='ignore')
if 'Uptime Kuma' in body:
print("OK")
except Exception:
pass
PY
register: kuma_vm
changed_when: false
failed_when: false
when: kuma_controller.status | default(0) != 200 or kuma_controller.content is not defined
# Run only if controller check missing or didn't succeed
when: (kuma_url is not defined) or (kuma_url | length == 0) or (kuma_controller.status | default(0)) != 200
# ---- Pick HTML source (controller wins) ----
- name: Uptime Kuma | Choose HTML (controller wins, else VM)
- name: Kuma | Decide readiness source
ansible.builtin.set_fact:
kuma_html: >-
kuma_ready: >-
{{
(kuma_controller.content
if (kuma_controller.status|default(0))==200 and (kuma_controller.content is defined)
else ((kuma_vm.stdout | default('') | trim | length > 0) | ternary(kuma_vm.stdout | trim, omit))
(
(kuma_controller is defined)
and (kuma_controller.status|default(0))==200
and ('Uptime Kuma' in (kuma_controller.content|default('')))
) or
(
(kuma_vm.stdout | default('')) | trim == 'OK'
)
}}
failed_when: false
kuma_ready_source: >-
{{
(
(kuma_controller is defined)
and (kuma_controller.status|default(0))==200
and ('Uptime Kuma' in (kuma_controller.content|default('')))
)
| ternary('controller', 'vm')
}}
- name: Uptime Kuma | Print concise summary
- name: Kuma | Print concise summary
ansible.builtin.debug:
msg: >-
Uptime Kuma reachable at {{ kuma_url }}
(HTTP {{ kuma_controller.status | default('unknown') }}).
Title="{{ (kuma_html | default('') | regex_search('(?is)<title[^>]*>(.*?)</title>', '\\1')) | default('n/a') }}"
when: kuma_html is defined
Uptime Kuma is {{ 'READY' if kuma_ready else 'NOT READY' }}
(checked via {{ kuma_ready_source }}).
URL={{ (kuma_url if (kuma_url|default('')|length>0) else 'http://'+vm_ip+':'+(kuma_port|string)) }}
- name: Uptime Kuma | Web unavailable (after retries)
- name: Kuma | Not ready after retries
ansible.builtin.debug:
msg: "Uptime Kuma web není dostupný ani po pokusech."
when: kuma_html is not defined
msg: "Kuma web není dostupná ani po pokusech."
when: not kuma_ready
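# Note (illustrative): the play reads its connection details from environment variables
# (e.g. a Semaphore environment); KUMA_URL is optional, and the inventory name and the
# concrete values shown below are hypothetical:
#   export VM_IP=192.0.2.10
#   export VM_USER=ubuntu
#   export VM_PASS='...'
#   export KUMA_URL=https://monitor.example.org/   # optional; controller check is skipped when empty
#   ansible-playbook -i inventory nextcloud/update_uptime_kuma.yml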