diff --git a/docker-compose/docker-compose-uptimekuma.yml b/docker-compose/docker-compose-uptimekuma.yml new file mode 100644 index 0000000..1182262 --- /dev/null +++ b/docker-compose/docker-compose-uptimekuma.yml @@ -0,0 +1,12 @@ +version: '3.8' + +services: + uptime-kuma: + container_name: uptime-kuma-dev + image: louislam/uptime-kuma:latest + volumes: + #- ./data:/app/data + + - /data/compose/kuma/data:/app/data + ports: + - "3001:3001" # host:container diff --git a/old/update_uptime_kuma.yml b/old/update_uptime_kuma.yml new file mode 100644 index 0000000..ee607df --- /dev/null +++ b/old/update_uptime_kuma.yml @@ -0,0 +1,194 @@ +# nextcloud/update_uptime_kuma.yml + +- name: Update Uptime Kuma on VM via Proxmox + hosts: linux_servers + gather_facts: false + become: true + become_user: root + become_method: sudo + + vars: + # VM connection (provided by Semaphore env vars) + vm_ip: "{{ lookup('env', 'VM_IP') }}" + vm_user: "{{ lookup('env', 'VM_USER') }}" + vm_pass: "{{ lookup('env', 'VM_PASS') }}" + use_sudo: false + + # --- Debug mode (controlled via Semaphore variable) --- + DEBUG: "{{ lookup('env', 'DEBUG') | default(0) | int }}" + RETRIES: "{{ lookup('env', 'RETRIES') | default(25) | int }}" + + # Uptime Kuma specifics + kuma_project: "uptime-kuma" + kuma_compose_file: "/data/compose/uptime-kuma/docker-compose-uptime-kuma.yml" + kuma_service: "uptime-kuma" + kuma_image: "louislam/uptime-kuma:latest" + kuma_port: 3001 + + # Optional external URL for controller-side readiness check (e.g., https://kuma.example.com) + # If empty/undefined, controller check is skipped and we only probe from the VM. 
+ kuma_url: "{{ lookup('env', 'KUMA_URL') | default('', true) }}" + + # Fixed container name used in your compose (conflicts with previous/Portainer-run container) + kuma_container_name: "uptime-kuma-dev" + + # Retry policy + kuma_retries: "{{ RETRIES }}" + kuma_delay: 2 + + # Docker command prefix (consistent behavior) + docker_prefix: "unalias docker 2>/dev/null || true; DOCKER_CLI_HINTS=0; command docker" + + # Commands to run on the target VM (quiet outputs like in Collabora play) + kuma_commands: + - "{{ docker_prefix }} pull -q {{ kuma_image }} >/dev/null" + - "{{ docker_prefix }} compose -p {{ kuma_project }} -f {{ kuma_compose_file }} pull {{ kuma_service }} >/dev/null" + # remove conflicting container name before compose up (silently) + - "{{ docker_prefix }} rm -f {{ kuma_container_name }} >/dev/null 2>&1 || true" + - "{{ docker_prefix }} compose -p {{ kuma_project }} -f {{ kuma_compose_file }} up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null" + + tasks: + - name: Ensure sshpass is installed (for password-based SSH) # English comments + ansible.builtin.apt: + name: sshpass + state: present + update_cache: yes + + - name: Run Uptime Kuma update commands on VM (via SSH) # use SSHPASS env, hide item label + ansible.builtin.command: + argv: + - sshpass + - -e # read password from SSHPASS environment + - ssh + - -o + - StrictHostKeyChecking=no + - -o + - ConnectTimeout=15 + - "{{ vm_user }}@{{ vm_ip }}" + - bash + - -lc + - "{{ ('sudo ' if use_sudo else '') + item }}" + environment: + SSHPASS: "{{ vm_pass }}" # supply password via environment + loop: "{{ kuma_commands }}" + loop_control: + index_var: idx # capture loop index + label: "cmd-{{ idx }}" # avoid printing full command in (item=...) 
line + register: kuma_cmds + changed_when: false + no_log: "{{ DEBUG == 0 }}" # hide outputs and env when not debugging + + - name: Show outputs for each Uptime Kuma command + ansible.builtin.debug: + msg: | + CMD: {{ item.item }} + RC: {{ item.rc }} + STDOUT: + {{ (item.stdout | default('')).strip() }} + STDERR: + {{ (item.stderr | default('')).strip() }} + loop: "{{ kuma_cmds.results }}" + when: DEBUG == 1 + + - name: Fail play if any Uptime Kuma command failed # also hide item label + ansible.builtin.assert: + that: "item.rc == 0" + fail_msg: "Uptime Kuma update failed on VM: {{ item.item }} (rc={{ item.rc }})" + success_msg: "All Uptime Kuma update commands succeeded." + loop: "{{ kuma_cmds.results }}" + loop_control: + index_var: idx + label: "cmd-{{ idx }}" + + # ------------------------- + # Readiness checks (controller first, then VM fallback) + # ------------------------- + + - name: Kuma | Wait for homepage (controller first, with retries) + ansible.builtin.uri: + url: "{{ (kuma_url | regex_replace('/$','')) + '/' }}" + method: GET + return_content: true + # Validate TLS only when using https:// + validate_certs: "{{ (kuma_url | default('')) is match('^https://') }}" + status_code: 200 + register: kuma_controller + delegate_to: localhost + run_once: true + when: kuma_url is defined and (kuma_url | length) > 0 + retries: "{{ kuma_retries }}" + delay: "{{ kuma_delay }}" + until: kuma_controller.status == 200 + failed_when: false + changed_when: false + + - name: Kuma | VM-side fetch (HTML via Python, with retries) # use SSHPASS env here too + ansible.builtin.command: + argv: + - sshpass + - -e + - ssh + - -o + - StrictHostKeyChecking=no + - -o + - ConnectTimeout=15 + - "{{ vm_user }}@{{ vm_ip }}" + - bash + - -lc + - | + python3 - <<'PY' + # Fetch Kuma homepage from localhost and print HTML to stdout + import urllib.request, sys + try: + with urllib.request.urlopen("http://127.0.0.1:{{ kuma_port }}/", timeout=15) as r: + 
sys.stdout.write(r.read().decode(errors='ignore')) + except Exception: + pass + PY + environment: + SSHPASS: "{{ vm_pass }}" + register: kuma_vm + changed_when: false + failed_when: false + when: kuma_controller.status | default(0) != 200 or kuma_controller.content is not defined + retries: "{{ kuma_retries }}" + delay: "{{ kuma_delay }}" + until: (kuma_vm.stdout | default('') | trim | length) > 0 and ('Uptime Kuma' in (kuma_vm.stdout | default(''))) + no_log: "{{ DEBUG == 0 }}" # hide command and output when not debugging + + - name: Kuma | Choose homepage HTML (controller wins, else VM) # safe guard against empty result + ansible.builtin.set_fact: + kuma_home_html: >- + {{ + ( + kuma_controller.content + if (kuma_controller is defined) + and ((kuma_controller.status|default(0))==200) + and (kuma_controller.content is defined) + else + (kuma_vm.stdout | default('') | trim) + ) + }} + when: + - (kuma_controller is defined and (kuma_controller.status|default(0))==200 and (kuma_controller.content is defined)) + or ((kuma_vm.stdout | default('') | trim | length) > 0) + + - name: Kuma | Print concise summary + ansible.builtin.debug: + msg: >- + Uptime Kuma homepage {{ 'reachable' if (kuma_home_html is defined) else 'NOT reachable' }}. + Source={{ 'controller' if ((kuma_controller is defined) and (kuma_controller.status|default(0))==200 and (kuma_controller.content is defined)) else 'vm' if (kuma_vm.stdout|default('')|trim|length>0) else 'n/a' }}; + length={{ (kuma_home_html | default('')) | length }}; + contains('Uptime Kuma')={{ (kuma_home_html is defined) and ('Uptime Kuma' in kuma_home_html) }} + when: DEBUG == 1 + + - name: Kuma | Homepage unavailable (after retries) + ansible.builtin.debug: + msg: "Kuma web není dostupná ani po pokusech." 
+ when: kuma_home_html is not defined and DEBUG == 1 + + # Optional detailed dump (short excerpt only) + - name: Kuma | HTML excerpt (debug) + ansible.builtin.debug: + msg: "{{ (kuma_home_html | default(''))[:500] }}" + when: kuma_home_html is defined and DEBUG == 1 diff --git a/update_uptime_kuma.yml b/update_uptime_kuma.yml index ee607df..4db2a17 100644 --- a/update_uptime_kuma.yml +++ b/update_uptime_kuma.yml @@ -1,194 +1,86 @@ -# nextcloud/update_uptime_kuma.yml +# update_uptimekuma.yml -- name: Update Uptime Kuma on VM via Proxmox - hosts: linux_servers +- name: Update Uptime Kuma + hosts: pve2_vm gather_facts: false - become: true - become_user: root - become_method: sudo vars: - # VM connection (provided by Semaphore env vars) - vm_ip: "{{ lookup('env', 'VM_IP') }}" - vm_user: "{{ lookup('env', 'VM_USER') }}" - vm_pass: "{{ lookup('env', 'VM_PASS') }}" - use_sudo: false + # Compose sync (controller -> target) + compose_local_dir: "{{ playbook_dir }}/docker-compose" + compose_remote_base: "/home/{{ ansible_user }}/.ansible-compose" + compose_remote_dir: "{{ compose_remote_base }}/docker-compose" + compose_remote_archive: "{{ compose_remote_base }}/docker-compose.tar.gz" - # --- Debug mode (controlled via Semaphore variable) --- - DEBUG: "{{ lookup('env', 'DEBUG') | default(0) | int }}" - RETRIES: "{{ lookup('env', 'RETRIES') | default(25) | int }}" - - # Uptime Kuma specifics - kuma_project: "uptime-kuma" - kuma_compose_file: "/data/compose/uptime-kuma/docker-compose-uptime-kuma.yml" - kuma_service: "uptime-kuma" - kuma_image: "louislam/uptime-kuma:latest" - kuma_port: 3001 - - # Optional external URL for controller-side readiness check (e.g., https://kuma.example.com) - # If empty/undefined, controller check is skipped and we only probe from the VM. 
- kuma_url: "{{ lookup('env', 'KUMA_URL') | default('', true) }}" - - # Fixed container name used in your compose (conflicts with previous/Portainer-run container) - kuma_container_name: "uptime-kuma-dev" - - # Retry policy - kuma_retries: "{{ RETRIES }}" - kuma_delay: 2 - - # Docker command prefix (consistent behavior) - docker_prefix: "unalias docker 2>/dev/null || true; DOCKER_CLI_HINTS=0; command docker" - - # Commands to run on the target VM (quiet outputs like in Collabora play) - kuma_commands: - - "{{ docker_prefix }} pull -q {{ kuma_image }} >/dev/null" - - "{{ docker_prefix }} compose -p {{ kuma_project }} -f {{ kuma_compose_file }} pull {{ kuma_service }} >/dev/null" - # remove conflicting container name before compose up (silently) - - "{{ docker_prefix }} rm -f {{ kuma_container_name }} >/dev/null 2>&1 || true" - - "{{ docker_prefix }} compose -p {{ kuma_project }} -f {{ kuma_compose_file }} up -d --no-deps --force-recreate {{ kuma_service }} >/dev/null" + # Uptime Kuma settings + uptimekuma_project: uptimekuma + uptimekuma_compose_filename: "docker-compose-uptimekuma.yml" + uptimekuma_service: uptime-kuma + uptimekuma_port: 3001 tasks: - - name: Ensure sshpass is installed (for password-based SSH) # English comments - ansible.builtin.apt: - name: sshpass - state: present - update_cache: yes + - name: Ensure remote base directory exists + ansible.builtin.file: + path: "{{ compose_remote_base }}" + state: directory + mode: "0755" - - name: Run Uptime Kuma update commands on VM (via SSH) # use SSHPASS env, hide item label - ansible.builtin.command: - argv: - - sshpass - - -e # read password from SSHPASS environment - - ssh - - -o - - StrictHostKeyChecking=no - - -o - - ConnectTimeout=15 - - "{{ vm_user }}@{{ vm_ip }}" - - bash - - -lc - - "{{ ('sudo ' if use_sudo else '') + item }}" - environment: - SSHPASS: "{{ vm_pass }}" # supply password via environment - loop: "{{ kuma_commands }}" - loop_control: - index_var: idx # capture loop index - label: 
"cmd-{{ idx }}" # avoid printing full command in (item=...) line - register: kuma_cmds - changed_when: false - no_log: "{{ DEBUG == 0 }}" # hide outputs and env when not debugging - - - name: Show outputs for each Uptime Kuma command - ansible.builtin.debug: - msg: | - CMD: {{ item.item }} - RC: {{ item.rc }} - STDOUT: - {{ (item.stdout | default('')).strip() }} - STDERR: - {{ (item.stderr | default('')).strip() }} - loop: "{{ kuma_cmds.results }}" - when: DEBUG == 1 - - - name: Fail play if any Uptime Kuma command failed # also hide item label - ansible.builtin.assert: - that: "item.rc == 0" - fail_msg: "Uptime Kuma update failed on VM: {{ item.item }} (rc={{ item.rc }})" - success_msg: "All Uptime Kuma update commands succeeded." - loop: "{{ kuma_cmds.results }}" - loop_control: - index_var: idx - label: "cmd-{{ idx }}" - - # ------------------------- - # Readiness checks (controller first, then VM fallback) - # ------------------------- - - - name: Kuma | Wait for homepage (controller first, with retries) - ansible.builtin.uri: - url: "{{ (kuma_url | regex_replace('/$','')) + '/' }}" - method: GET - return_content: true - # Validate TLS only when using https:// - validate_certs: "{{ (kuma_url | default('')) is match('^https://') }}" - status_code: 200 - register: kuma_controller + - name: Create local archive of docker-compose directory (controller) + ansible.builtin.archive: + path: "{{ compose_local_dir }}/" + dest: "/tmp/docker-compose.tar.gz" + format: gz delegate_to: localhost run_once: true - when: kuma_url is defined and (kuma_url | length) > 0 - retries: "{{ kuma_retries }}" - delay: "{{ kuma_delay }}" - until: kuma_controller.status == 200 - failed_when: false - changed_when: false - - name: Kuma | VM-side fetch (HTML via Python, with retries) # use SSHPASS env here too - ansible.builtin.command: - argv: - - sshpass - - -e - - ssh - - -o - - StrictHostKeyChecking=no - - -o - - ConnectTimeout=15 - - "{{ vm_user }}@{{ vm_ip }}" - - bash - - -lc - - | - 
python3 - <<'PY' - # Fetch Kuma homepage from localhost and print HTML to stdout - import urllib.request, sys - try: - with urllib.request.urlopen("http://127.0.0.1:{{ kuma_port }}/", timeout=15) as r: - sys.stdout.write(r.read().decode(errors='ignore')) - except Exception: - pass - PY - environment: - SSHPASS: "{{ vm_pass }}" - register: kuma_vm - changed_when: false - failed_when: false - when: kuma_controller.status | default(0) != 200 or kuma_controller.content is not defined - retries: "{{ kuma_retries }}" - delay: "{{ kuma_delay }}" - until: (kuma_vm.stdout | default('') | trim | length) > 0 and ('Uptime Kuma' in (kuma_vm.stdout | default(''))) - no_log: "{{ DEBUG == 0 }}" # hide command and output when not debugging + - name: Upload archive to remote host + ansible.builtin.copy: + src: "/tmp/docker-compose.tar.gz" + dest: "{{ compose_remote_archive }}" + mode: "0644" - - name: Kuma | Choose homepage HTML (controller wins, else VM) # safe guard against empty result - ansible.builtin.set_fact: - kuma_home_html: >- - {{ - ( - kuma_controller.content - if (kuma_controller is defined) - and ((kuma_controller.status|default(0))==200) - and (kuma_controller.content is defined) - else - (kuma_vm.stdout | default('') | trim) - ) - }} - when: - - (kuma_controller is defined and (kuma_controller.status|default(0))==200 and (kuma_controller.content is defined)) - or ((kuma_vm.stdout | default('') | trim | length) > 0) + - name: Recreate remote compose directory + ansible.builtin.file: + path: "{{ compose_remote_dir }}" + state: absent - - name: Kuma | Print concise summary - ansible.builtin.debug: - msg: >- - Uptime Kuma homepage {{ 'reachable' if (kuma_home_html is defined) else 'NOT reachable' }}. 
- Source={{ 'controller' if ((kuma_controller is defined) and (kuma_controller.status|default(0))==200 and (kuma_controller.content is defined)) else 'vm' if (kuma_vm.stdout|default('')|trim|length>0) else 'n/a' }}; - length={{ (kuma_home_html | default('')) | length }}; - contains('Uptime Kuma')={{ (kuma_home_html is defined) and ('Uptime Kuma' in kuma_home_html) }} - when: DEBUG == 1 + - name: Ensure remote compose directory exists + ansible.builtin.file: + path: "{{ compose_remote_dir }}" + state: directory + mode: "0755" - - name: Kuma | Homepage unavailable (after retries) - ansible.builtin.debug: - msg: "Kuma web není dostupná ani po pokusech." - when: kuma_home_html is not defined and DEBUG == 1 + - name: Extract archive on remote host + ansible.builtin.unarchive: + src: "{{ compose_remote_archive }}" + dest: "{{ compose_remote_dir }}" + remote_src: true - # Optional detailed dump (short excerpt only) - - name: Kuma | HTML excerpt (debug) - ansible.builtin.debug: - msg: "{{ (kuma_home_html | default(''))[:500] }}" - when: kuma_home_html is defined and DEBUG == 1 + - name: Pull latest Uptime Kuma image + community.docker.docker_compose_v2: + project_name: "{{ uptimekuma_project }}" + project_src: "{{ compose_remote_dir }}" + files: + - "{{ uptimekuma_compose_filename }}" + pull: always + + - name: Recreate Uptime Kuma service + community.docker.docker_compose_v2: + project_name: "{{ uptimekuma_project }}" + project_src: "{{ compose_remote_dir }}" + files: + - "{{ uptimekuma_compose_filename }}" + services: + - "{{ uptimekuma_service }}" + state: present + recreate: always + + - name: Wait for Uptime Kuma port + ansible.builtin.wait_for: + host: 127.0.0.1 + port: "{{ uptimekuma_port }}" + timeout: 120 + + - name: Check Uptime Kuma HTTP endpoint + ansible.builtin.uri: + url: "http://127.0.0.1:{{ uptimekuma_port }}/" + status_code: 200