# forked from jakub/ansible
# Files: ansible_fencl/nextcloud/update_uptime_kuma.yml (358 lines, 12 KiB, YAML)
# nextcloud/update_uptime_kuma.yml
#
# Update the Uptime Kuma container running on a VM, driven entirely from the
# Proxmox host over password-based SSH (sshpass). The compose project name,
# service name and working directory are auto-discovered from the labels of
# the running container, so no compose path has to be hard-coded.
#
# Required environment on the controller: VM_IP, VM_USER, VM_PASS.
---
- name: Update Uptime Kuma on VM via Proxmox (auto-discover compose path)
  hosts: proxmox
  gather_facts: false
  become: true
  become_user: root
  become_method: sudo

  vars:
    # ---- VM access via sshpass (same pattern as the Collabora playbook) ----
    vm_ip: "{{ lookup('env', 'VM_IP') }}"
    vm_user: "{{ lookup('env', 'VM_USER') }}"
    vm_pass: "{{ lookup('env', 'VM_PASS') }}"
    # Set to true if the VM user needs sudo to talk to the Docker daemon.
    use_sudo: false

    # ---- Inputs / defaults ----
    kuma_url: "https://monitor.martinfencl.eu/"
    kuma_container_name: "uptime-kuma-dev"  # running container name to inspect

    # ---- Docker CLI prefix (disables aliases and CLI hints) ----
    docker_prefix: "unalias docker 2>/dev/null || true; DOCKER_CLI_HINTS=0; command docker"

    # Candidate compose filenames we will try in working_dir and its parent.
    compose_candidates:
      - docker-compose.yml
      - docker-compose.yaml
      - compose.yml
      - compose.yaml
      - stack.yml
      - stack.yaml

    # Whether to wait for Docker HEALTH=healthy before the HTTP check.
    wait_for_health: true
    health_timeout_secs: 120
    health_poll_interval: 3

  tasks:
    - name: Ensure sshpass is installed (for password-based SSH)
      ansible.builtin.apt:
        name: sshpass
        state: present
        update_cache: true

    # --- Discover compose metadata from the running container labels ---
    - name: Discover compose labels from the container (project, service, working_dir)
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          # {{"{{"}} / {{"}}"}} render literal {{ }} so the Go template reaches docker.
          - >
            {{ docker_prefix }} inspect {{ kuma_container_name }}
            --format '{{"{{"}}json .Config.Labels{{"}}"}}'
      register: kuma_labels_raw
      changed_when: false

    - name: Parse compose labels JSON
      ansible.builtin.set_fact:
        kuma_labels: "{{ kuma_labels_raw.stdout | from_json }}"
      # NOTE: a template error in set_fact args fires before failed_when is
      # evaluated, so ignore_errors (not failed_when) is the working guard
      # against a container without compose labels / unexpected output.
      ignore_errors: true

    - name: Derive compose parameters (project, service, working_dir)
      ansible.builtin.set_fact:
        kuma_project: "{{ kuma_labels['com.docker.compose.project'] | default('kuma') }}"
        kuma_service: "{{ kuma_labels['com.docker.compose.service'] | default('uptime-kuma') }}"
        kuma_workdir: "{{ kuma_labels['com.docker.compose.project.working_dir'] | default('') }}"
        kuma_parentdir: "{{ (kuma_labels['com.docker.compose.project.working_dir'] | default('') | dirname) if (kuma_labels['com.docker.compose.project.working_dir'] | default('')) != '' else '' }}"
      when: kuma_labels is defined

    - name: Debug | Discovered compose info
      ansible.builtin.debug:
        msg: |
          Discovered:
            project={{ kuma_project | default('n/a') }}
            service={{ kuma_service | default('n/a') }}
            working_dir={{ kuma_workdir | default('n/a') }}
            parent_dir={{ kuma_parentdir | default('n/a') }}

    # --- Try to locate a compose file among candidates (in working_dir and parent) ---
    # The probe must run ON THE VM (the compose file lives there), so the
    # script is shipped over ssh via stdin instead of running locally.
    - name: Locate compose file on VM (first match wins)
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -s
        stdin: |
          set -euo pipefail
          wd='{{ kuma_workdir }}'
          pd='{{ kuma_parentdir }}'
          found=''
          for dir in "$wd" "$pd"; do
            [ -n "$dir" ] || continue
            for f in {% for c in compose_candidates %}"{{ c }}"{% if not loop.last %} {% endif %}{% endfor %}; do
              if [ -f "$dir/$f" ]; then
                found="$dir/$f"
                break 2
              fi
            done
          done
          if [ -n "$found" ]; then
            printf '%s\n' "$found"
          fi
      register: kuma_compose_guess
      changed_when: false

    - name: Set compose_file fact if found
      ansible.builtin.set_fact:
        kuma_compose_file: "{{ kuma_compose_guess.stdout | trim }}"
      when: (kuma_compose_guess.stdout | default('') | trim) != ''

    - name: Debug | Compose file resolution
      ansible.builtin.debug:
        msg: |
          Compose resolution:
            chosen_file={{ kuma_compose_file | default('NONE') }}
            will_fallback_with_project_directory={{ kuma_compose_file is not defined }}

    # --- Pull latest image first (generic pull) ---
    - name: Pull image louislam/uptime-kuma:latest
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - "{{ ('sudo ' if use_sudo else '') + docker_prefix }} pull -q louislam/uptime-kuma:latest >/dev/null"
      register: kuma_pull
      changed_when: false

    # --- Compose pull (prefer -f if we have a file, else use --project-directory) ---
    # The two variants register DIFFERENT variables: in Ansible a skipped task
    # still assigns its register, so sharing one name would let the skipped
    # variant clobber the real result (no .rc -> assert below would break).
    - name: docker compose pull {{ kuma_service | default('') }} (with -f)
      when: kuma_compose_file is defined
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - >
            {{ ('sudo ' if use_sudo else '') + docker_prefix }}
            compose -p {{ kuma_project }} -f '{{ kuma_compose_file }}' pull {{ kuma_service }} >/dev/null
      register: kuma_comp_pull_file
      changed_when: false

    - name: docker compose pull {{ kuma_service | default('') }} (fallback --project-directory)
      when: kuma_compose_file is not defined
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - >
            {{ ('sudo ' if use_sudo else '') + docker_prefix }}
            compose --project-name {{ kuma_project }} --project-directory '{{ kuma_workdir }}'
            pull {{ kuma_service }} >/dev/null
      register: kuma_comp_pull_projdir
      changed_when: false

    # --- Compose up (prefer -f, else --project-directory) ---
    - name: docker compose up --no-deps --force-recreate {{ kuma_service | default('') }} (with -f)
      when: kuma_compose_file is defined
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - >
            {{ ('sudo ' if use_sudo else '') + docker_prefix }}
            compose -p {{ kuma_project }} -f '{{ kuma_compose_file }}'
            up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null
      register: kuma_comp_up_file
      changed_when: false

    - name: docker compose up --no-deps --force-recreate {{ kuma_service | default('') }} (fallback --project-directory)
      when: kuma_compose_file is not defined
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - >
            {{ ('sudo ' if use_sudo else '') + docker_prefix }}
            compose --project-name {{ kuma_project }} --project-directory '{{ kuma_workdir }}'
            up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null
      register: kuma_comp_up_projdir
      changed_when: false

    - name: Consolidate compose results (pick whichever variant actually ran)
      ansible.builtin.set_fact:
        kuma_comp_pull: "{{ kuma_comp_pull_file if kuma_compose_file is defined else kuma_comp_pull_projdir }}"
        kuma_comp_up: "{{ kuma_comp_up_file if kuma_compose_file is defined else kuma_comp_up_projdir }}"

    - name: Show outputs of compose pull/up
      ansible.builtin.debug:
        msg: |
          PULL rc={{ kuma_comp_pull.rc }} stderr="{{ kuma_comp_pull.stderr | default('') | trim }}"
          UP   rc={{ kuma_comp_up.rc }} stderr="{{ kuma_comp_up.stderr | default('') | trim }}"

    - name: Assert compose pull/up succeeded
      ansible.builtin.assert:
        that:
          - kuma_comp_pull.rc == 0
          - kuma_comp_up.rc == 0
        fail_msg: "docker compose pull/up failed (see previous stderr)."
        success_msg: "Uptime Kuma updated and recreated successfully."

    # ---- Optional: wait for Docker HEALTH=healthy before HTTP check ----
    # NOTE(review): a container without a healthcheck reports "unknown" and
    # will run into the timeout — same behavior as before, just documented.
    - name: Wait for container HEALTH=healthy (optional)
      when: wait_for_health
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - |
            set -euo pipefail
            svc="{{ kuma_service }}"
            # Grab the actual container name for this service in the project
            cname=$({{ docker_prefix }} ps --format '{{"{{"}}.Names{{"}}"}}' \
              | awk '/{{ kuma_project }}.*{{ kuma_service }}/ {print; exit}')
            end=$(( $(date +%s) + {{ health_timeout_secs }} ))
            while :; do
              status=$({{ docker_prefix }} inspect -f '{{"{{"}}.State.Health.Status{{"}}"}}' "$cname" 2>/dev/null || echo "unknown")
              if [ "$status" = "healthy" ]; then
                echo "healthy"
                exit 0
              fi
              if [ $(date +%s) -ge $end ]; then
                echo "timeout:$status"
                exit 1
              fi
              sleep {{ health_poll_interval }}
            done
      register: kuma_health
      changed_when: false

    # ---- Health check from the controller: wait for 200 on the public URL ----
    - name: Uptime Kuma | Wait for web to return 200 (controller first)
      ansible.builtin.uri:
        url: "{{ kuma_url }}"
        method: GET
        return_content: true
        validate_certs: true
        status_code: 200
      register: kuma_controller
      delegate_to: localhost
      run_once: true
      retries: 15
      delay: 2
      until: kuma_controller.status == 200
      # Best effort: the VM-side fetch below is the fallback.
      failed_when: false
      changed_when: false

    # ---- Optional VM-side fetch (double-check from VM) ----
    - name: Uptime Kuma | VM-side fetch HTML (via Python)
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - |
            python3 - <<'PY'
            import sys, urllib.request, ssl
            try:
                ctx = ssl.create_default_context()
                with urllib.request.urlopen("{{ kuma_url }}", timeout=15, context=ctx) as r:
                    sys.stdout.write(r.read().decode(errors="ignore"))
            except Exception:
                pass
            PY
      register: kuma_vm
      changed_when: false
      failed_when: false
      when: (kuma_controller.status | default(0)) != 200 or kuma_controller.content is not defined

    # ---- Pick HTML source (controller wins) ----
    # Guarded by `when` instead of `omit`: outside module parameters `omit`
    # expands to a placeholder string, which would leave kuma_html "defined"
    # even when nothing was fetched.
    - name: Uptime Kuma | Choose HTML (controller wins, else VM)
      ansible.builtin.set_fact:
        kuma_html: >-
          {{ kuma_controller.content
             if ((kuma_controller.status | default(0)) == 200 and kuma_controller.content is defined)
             else (kuma_vm.stdout | default('') | trim) }}
      when: >-
        ((kuma_controller.status | default(0)) == 200 and kuma_controller.content is defined)
        or ((kuma_vm.stdout | default('') | trim) | length > 0)

    - name: Uptime Kuma | Print concise summary
      ansible.builtin.debug:
        msg: >-
          Uptime Kuma reachable at {{ kuma_url }}
          (HTTP {{ kuma_controller.status | default('unknown') }}).
          Title="{{ ((kuma_html | default('') | regex_search('(?is)<title[^>]*>(.*?)</title>', '\\1')) | default(['n/a'], true)) | first }}"
      when: kuma_html is defined

    - name: Uptime Kuma | Web unavailable (after retries)
      ansible.builtin.debug:
        msg: "Uptime Kuma web není dostupný ani po pokusech."
      when: kuma_html is not defined