---
# nextcloud/update_uptime_kuma.yml
#
# Update Uptime Kuma running on a VM, reached through the Proxmox host.
# The compose project/service/working_dir are auto-discovered from the running
# container's compose labels, so the playbook works regardless of where the
# stack lives (e.g. Portainer-managed paths under /data/compose/<stack_id>/).
#
# Required environment variables on the controller: VM_IP, VM_USER, VM_PASS.

- name: Update Uptime Kuma on VM via Proxmox (auto-discover compose path)
  hosts: proxmox
  gather_facts: false
  become: true
  become_user: root
  become_method: sudo

  vars:
    # ---- VM access via sshpass (same pattern as the Collabora playbook) ----
    vm_ip: "{{ lookup('env', 'VM_IP') }}"
    vm_user: "{{ lookup('env', 'VM_USER') }}"
    vm_pass: "{{ lookup('env', 'VM_PASS') }}"
    use_sudo: false

    # ---- Inputs / defaults ----
    kuma_url: "https://monitor.martinfencl.eu/"
    kuma_container_name: "uptime-kuma-dev"  # running container name to inspect

    # ---- Docker CLI prefix (consistent with the rest of the repo) ----
    # Drops any shell alias and silences CLI hints so output stays parseable.
    docker_prefix: "unalias docker 2>/dev/null || true; DOCKER_CLI_HINTS=0; command docker"

  tasks:
    - name: Ensure sshpass is installed (for password-based SSH)
      ansible.builtin.apt:
        name: sshpass
        state: present
        update_cache: true

    # --- Discover compose metadata from the running container labels ---
    - name: Discover compose labels from the container (project, service, working_dir)
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          # The {{ "{{" }} / {{ "}}" }} escapes render a literal Go-template
          # `{{json .Config.Labels}}` for `docker inspect --format`.
          - >
            {{ docker_prefix }} inspect {{ kuma_container_name }}
            --format '{{ "{{" }}json .Config.Labels{{ "}}" }}'
      register: kuma_labels_raw
      changed_when: false

    - name: Parse compose labels JSON
      ansible.builtin.set_fact:
        kuma_labels: "{{ kuma_labels_raw.stdout | from_json }}"
      # Best-effort: if the container is missing the fact stays undefined and
      # the defaults below take over.
      failed_when: false

    - name: Derive compose parameters (project, service, working_dir)
      ansible.builtin.set_fact:
        kuma_project: "{{ kuma_labels['com.docker.compose.project'] | default('kuma') }}"
        kuma_service: "{{ kuma_labels['com.docker.compose.service'] | default('uptime-kuma') }}"
        kuma_workdir: "{{ kuma_labels['com.docker.compose.project.working_dir'] | default('') }}"
      when: kuma_labels is defined
      failed_when: false

    # NOTE: `omit` inside set_fact does NOT leave a fact undefined — it stores
    # the omit placeholder string, which would make every `is defined` check
    # below pass incorrectly. Define the compose file path only when a
    # working_dir was actually discovered.
    - name: Derive compose file path (only when working_dir is known)
      ansible.builtin.set_fact:
        kuma_compose_file: "{{ kuma_workdir }}/docker-compose.yml"
      when:
        - kuma_workdir is defined
        - kuma_workdir | length > 0

    - name: Debug | Discovered compose info
      ansible.builtin.debug:
        msg: |
          Discovered:
            project={{ kuma_project | default('n/a') }}
            service={{ kuma_service | default('n/a') }}
            working_dir={{ kuma_workdir | default('n/a') }}
            compose_file={{ kuma_compose_file | default('n/a') }}

    # --- Verify compose file existence on the VM ---
    - name: Check that compose file exists on VM
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - "test -f '{{ kuma_compose_file }}' && echo OK || echo MISSING"
      register: kuma_compose_check
      changed_when: false
      when: kuma_compose_file is defined

    - name: Fail early if compose file is missing
      ansible.builtin.fail:
        msg: >-
          Compose file not found on VM: {{ kuma_compose_file | default('?') }}.
          Discovered working_dir={{ kuma_workdir | default('?') }},
          project={{ kuma_project | default('?') }},
          service={{ kuma_service | default('?') }}.
          The container seems to be managed by Portainer; expected path like
          /data/compose/<stack_id>/docker-compose.yml.
      when: >-
        kuma_compose_file is not defined
        or (kuma_compose_check.stdout | default('MISSING')) != "OK"

    # --- Pull latest image first (generic pull) ---
    - name: Pull image louislam/uptime-kuma:latest
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - "{{ ('sudo ' if use_sudo else '') + docker_prefix }} pull -q louislam/uptime-kuma:latest >/dev/null"
      register: kuma_pull
      changed_when: false

    # --- Compose pull/up for the discovered service only ---
    - name: docker compose pull {{ kuma_service | default('uptime-kuma') }}
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - >
            {{ ('sudo ' if use_sudo else '') + docker_prefix }} compose
            -p {{ kuma_project }} -f '{{ kuma_compose_file }}'
            pull {{ kuma_service }} >/dev/null
      register: kuma_comp_pull
      changed_when: false

    - name: docker compose up --no-deps --force-recreate {{ kuma_service | default('uptime-kuma') }}
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - >
            {{ ('sudo ' if use_sudo else '') + docker_prefix }} compose
            -p {{ kuma_project }} -f '{{ kuma_compose_file }}'
            up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null
      register: kuma_comp_up
      changed_when: false

    - name: Show outputs of compose pull/up
      ansible.builtin.debug:
        msg: |
          PULL rc={{ kuma_comp_pull.rc }} stderr="{{ kuma_comp_pull.stderr | default('') | trim }}"
          UP   rc={{ kuma_comp_up.rc }} stderr="{{ kuma_comp_up.stderr | default('') | trim }}"

    - name: Assert compose pull/up succeeded
      ansible.builtin.assert:
        that:
          - kuma_comp_pull.rc == 0
          - kuma_comp_up.rc == 0
        fail_msg: "docker compose pull/up failed (see previous stderr)."
        success_msg: "Uptime Kuma updated and recreated successfully."

    # ---- Health check from the controller: wait for 200 on the public URL ----
    - name: Uptime Kuma | Wait for web to return 200 (controller first)
      ansible.builtin.uri:
        url: "{{ kuma_url }}"
        method: GET
        return_content: true
        validate_certs: true
        status_code: 200
      register: kuma_controller
      delegate_to: localhost
      run_once: true
      retries: 15
      delay: 2
      until: kuma_controller.status == 200
      failed_when: false
      changed_when: false

    # ---- Optional VM-side fetch (double-check from the VM itself) ----
    - name: Uptime Kuma | VM-side fetch HTML (via Python)
      ansible.builtin.command:
        argv:
          - sshpass
          - -p
          - "{{ vm_pass }}"
          - ssh
          - -o
          - StrictHostKeyChecking=no
          - -o
          - ConnectTimeout=15
          - "{{ vm_user }}@{{ vm_ip }}"
          - bash
          - -lc
          - |
            python3 - <<'PY'
            import sys, urllib.request, ssl
            try:
                ctx = ssl.create_default_context()
                with urllib.request.urlopen("{{ kuma_url }}", timeout=15, context=ctx) as r:
                    sys.stdout.write(r.read().decode(errors="ignore"))
            except Exception:
                pass
            PY
      register: kuma_vm
      changed_when: false
      failed_when: false
      when: >-
        (kuma_controller.status | default(0)) != 200
        or kuma_controller.content is not defined

    # ---- Pick HTML source (controller wins) ----
    - name: Uptime Kuma | Choose HTML (controller wins, else VM)
      ansible.builtin.set_fact:
        kuma_html: >-
          {{ (kuma_controller.content
              if (kuma_controller.status | default(0)) == 200
                 and (kuma_controller.content is defined)
              else ((kuma_vm.stdout | default('') | trim | length > 0)
                    | ternary(kuma_vm.stdout | trim, omit))) }}
      failed_when: false

    - name: Uptime Kuma | Print concise summary
      ansible.builtin.debug:
        msg: >-
          Uptime Kuma reachable at {{ kuma_url }}
          (HTTP {{ kuma_controller.status | default('unknown') }}).
          Title="{{ kuma_html | default('')
                    | regex_search('(?is)<title[^>]*>(.*?)</title>', '\\1')
                    | default(['n/a'], true) | first }}"
      when: kuma_html is defined

    - name: Uptime Kuma | Web unavailable (after retries)
      ansible.builtin.debug:
        msg: "Uptime Kuma web není dostupný ani po pokusech."
      when: kuma_html is not defined