feat: update deployment configuration and encrypted env loader
- Update Ansible playbooks and roles for application deployment
- Add new Gitea/Traefik troubleshooting playbooks
- Update Docker Compose configurations (base, local, staging, production)
- Enhance EncryptedEnvLoader with improved error handling
- Add deployment scripts (autossh setup, migration, secret testing)
- Update CI/CD workflows and documentation
- Add Semaphore stack configuration
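The deployment changes below move the application stack from a single docker-compose.yml to a layered docker-compose.base.yml plus an environment-specific override. A minimal sketch of the invocation the updated playbooks rely on, assuming $APP_STACK_PATH stands in for the playbooks' app_stack_path variable:

    # Render the effective configuration from the base file plus the production override
    docker compose \
      -f "$APP_STACK_PATH/docker-compose.base.yml" \
      -f "$APP_STACK_PATH/docker-compose.production.yml" \
      config

    # Bring the stack up with the same file pair (the deploy playbook does this via
    # community.docker.docker_compose_v2 after syncing files and updating the image tag)
    docker compose \
      -f "$APP_STACK_PATH/docker-compose.base.yml" \
      -f "$APP_STACK_PATH/docker-compose.production.yml" \
      up -d --remove-orphans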
@@ -62,11 +62,6 @@
      set_fact:
        server_vpn_ip: "{{ (wireguard_server_config_read.content | b64decode | regex_search('Address = ([0-9.]+)', '\\1')) | first | default('10.8.0.1') }}"

    - name: Set default DNS servers if not provided
      set_fact:
        wireguard_dns_servers: "{{ [server_vpn_ip] }}"
      when: wireguard_dns_servers | length == 0

    - name: Extract WireGuard server IP octets
      set_fact:
        wireguard_server_ip_octets: "{{ server_vpn_ip.split('.') }}"
deployment/ansible/playbooks/check-gitea-bad-gateway.yml (new file, 192 lines)
@@ -0,0 +1,192 @@
---
- name: Diagnose Gitea Bad Gateway Issue
  hosts: production
  gather_facts: yes
  become: no

  vars:
    gitea_stack_path: "{{ stacks_base_path }}/gitea"

  tasks:
    - name: Check if Gitea stack directory exists
      stat:
        path: "{{ gitea_stack_path }}"
      register: gitea_stack_dir

    - name: Display Gitea stack directory status
      debug:
        msg: "Gitea stack path: {{ gitea_stack_path }} - Exists: {{ gitea_stack_dir.stat.exists }}"

    - name: Check Gitea container status
      shell: |
        cd {{ gitea_stack_path }}
        echo "=== Gitea Container Status ==="
        docker compose ps 2>&1 || echo "Could not check container status"
      args:
        executable: /bin/bash
      register: gitea_status
      ignore_errors: yes
      failed_when: false
      when: gitea_stack_dir.stat.exists

    - name: Display Gitea container status
      debug:
        msg: "{{ gitea_status.stdout_lines }}"
      when: gitea_stack_dir.stat.exists

    - name: Check if Gitea container is running
      shell: |
        docker ps --filter name=gitea --format "{{ '{{' }}.Names{{ '}}' }}: {{ '{{' }}.Status{{ '}}' }}"
      register: gitea_running
      ignore_errors: yes
      failed_when: false

    - name: Display Gitea running status
      debug:
        msg: "{{ gitea_running.stdout_lines if gitea_running.stdout else 'Gitea container not found' }}"

    - name: Check Gitea logs (last 50 lines)
      shell: |
        cd {{ gitea_stack_path }}
        echo "=== Gitea Logs (Last 50 lines) ==="
        docker compose logs --tail=50 gitea 2>&1 || echo "Could not read Gitea logs"
      args:
        executable: /bin/bash
      register: gitea_logs
      ignore_errors: yes
      failed_when: false
      when: gitea_stack_dir.stat.exists

    - name: Display Gitea logs
      debug:
        msg: "{{ gitea_logs.stdout_lines }}"
      when: gitea_stack_dir.stat.exists

    - name: Check Gitea container health
      shell: |
        docker inspect gitea --format '{{ '{{' }}.State.Health.Status{{ '}}' }}' 2>&1 || echo "Could not check health"
      register: gitea_health
      ignore_errors: yes
      failed_when: false

    - name: Display Gitea health status
      debug:
        msg: "Gitea health: {{ gitea_health.stdout }}"

    - name: Test Gitea health endpoint from container
      shell: |
        docker exec gitea curl -f http://localhost:3000/api/healthz 2>&1 || echo "Health check failed"
      register: gitea_internal_health
      ignore_errors: yes
      failed_when: false

    - name: Display internal health check result
      debug:
        msg: "{{ gitea_internal_health.stdout_lines }}"

    - name: Check if Gitea is reachable from Traefik network
      shell: |
        docker exec traefik curl -f http://gitea:3000/api/healthz 2>&1 || echo "Could not reach Gitea from Traefik network"
      register: gitea_from_traefik
      ignore_errors: yes
      failed_when: false

    - name: Display Traefik to Gitea connectivity
      debug:
        msg: "{{ gitea_from_traefik.stdout_lines }}"

    - name: Check Traefik logs for Gitea errors
      shell: |
        cd {{ stacks_base_path }}/traefik
        echo "=== Traefik Logs - Gitea related (Last 30 lines) ==="
        docker compose logs --tail=100 traefik 2>&1 | grep -i "gitea" | tail -30 || echo "No Gitea-related logs found"
      args:
        executable: /bin/bash
      register: traefik_gitea_logs
      ignore_errors: yes
      failed_when: false

    - name: Display Traefik Gitea logs
      debug:
        msg: "{{ traefik_gitea_logs.stdout_lines }}"

    - name: Check Docker networks
      shell: |
        echo "=== Docker Networks ==="
        docker network ls
        echo ""
        echo "=== Traefik Network Details ==="
        docker network inspect traefik-public 2>&1 | grep -E "(Name|Subnet|Containers|gitea)" || echo "Could not inspect traefik-public network"
      args:
        executable: /bin/bash
      register: network_info
      ignore_errors: yes
      failed_when: false

    - name: Display network info
      debug:
        msg: "{{ network_info.stdout_lines }}"

    - name: Check if Gitea is in traefik-public network
      shell: |
        docker network inspect traefik-public 2>&1 | grep -i "gitea" || echo "Gitea not found in traefik-public network"
      register: gitea_in_network
      ignore_errors: yes
      failed_when: false

    - name: Display Gitea network membership
      debug:
        msg: "{{ gitea_in_network.stdout_lines }}"

    - name: Check Gitea container configuration
      shell: |
        echo "=== Gitea Container Labels ==="
        # Print the container labels as JSON and filter for Traefik-related entries
        docker inspect gitea --format '{{ '{{' }} json .Config.Labels {{ '}}' }}' 2>&1 | grep -i traefik || echo "No Traefik labels found"
      register: gitea_labels
      ignore_errors: yes
      failed_when: false

    - name: Display Gitea labels
      debug:
        msg: "{{ gitea_labels.stdout_lines }}"

    - name: Check Traefik service registration
      shell: |
        docker exec traefik wget -qO- http://localhost:8080/api/http/services 2>&1 | grep -i gitea || echo "Gitea service not found in Traefik API"
      register: traefik_service
      ignore_errors: yes
      failed_when: false

    - name: Display Traefik service registration
      debug:
        msg: "{{ traefik_service.stdout_lines }}"

    - name: Test external Gitea access
      shell: |
        echo "=== Testing External Gitea Access ==="
        curl -k -H "User-Agent: Mozilla/5.0" -s -o /dev/null -w "HTTP Status: %{http_code}\n" https://git.michaelschiemer.de/ 2>&1 || echo "Connection failed"
      args:
        executable: /bin/bash
      register: external_test
      ignore_errors: yes
      failed_when: false

    - name: Display external test result
      debug:
        msg: "{{ external_test.stdout_lines }}"

    - name: Summary
      debug:
        msg:
          - "=== DIAGNOSIS SUMMARY ==="
          - "1. Check if Gitea container is running"
          - "2. Check if Gitea is in traefik-public network"
          - "3. Check Gitea health endpoint (port 3000)"
          - "4. Check Traefik can reach Gitea"
          - "5. Check Traefik logs for errors"
          - ""
          - "Common issues:"
          - "- Container not running: Restart with 'docker compose up -d' in {{ gitea_stack_path }}"
          - "- Not in network: Recreate container or add to network"
          - "- Health check failing: Check Gitea logs for errors"
          - "- Traefik can't reach: Check network configuration"
deployment/ansible/playbooks/check-traefik-gitea-config.yml (new file, 70 lines)
@@ -0,0 +1,70 @@
---
- name: Check Traefik Gitea Configuration
  hosts: production
  gather_facts: yes
  become: no

  vars:
    traefik_stack_path: "{{ stacks_base_path }}/traefik"

  tasks:
    - name: Check Traefik logs for Gitea errors
      shell: |
        cd {{ traefik_stack_path }}
        echo "=== Traefik Logs - Gitea errors (Last 50 lines) ==="
        docker compose logs --tail=100 traefik 2>&1 | grep -i "gitea\|502\|bad gateway" | tail -50 || echo "No Gitea-related errors found"
      args:
        executable: /bin/bash
      register: traefik_errors
      ignore_errors: yes
      failed_when: false

    - name: Display Traefik errors
      debug:
        msg: "{{ traefik_errors.stdout_lines }}"

    - name: Check dynamic Gitea configuration on server
      shell: |
        cat {{ traefik_stack_path }}/dynamic/gitea.yml 2>&1 || echo "File not found"
      register: gitea_dynamic_config
      ignore_errors: yes
      failed_when: false

    - name: Display dynamic Gitea config
      debug:
        msg: "{{ gitea_dynamic_config.stdout_lines }}"

    - name: Test if Traefik can resolve gitea hostname
      shell: |
        docker exec traefik getent hosts gitea 2>&1 || echo "Cannot resolve gitea hostname"
      register: traefik_resolve
      ignore_errors: yes
      failed_when: false

    - name: Display Traefik resolve result
      debug:
        msg: "{{ traefik_resolve.stdout_lines }}"

    - name: Get Gitea container IP
      shell: |
        # Read the container's IPv4 address on the traefik-public network
        docker inspect gitea --format '{{ '{{' }} (index .NetworkSettings.Networks "traefik-public").IPAddress {{ '}}' }}' 2>&1 || echo "Could not get IP"
      register: gitea_ip
      ignore_errors: yes
      failed_when: false

    - name: Display Gitea IP
      debug:
        msg: "Gitea IP in traefik-public network: {{ gitea_ip.stdout }}"

    - name: Test connectivity from Traefik to Gitea IP
      shell: |
        GITEA_IP="{{ gitea_ip.stdout | default('172.21.0.3') }}"
        docker exec traefik wget -qO- --timeout=5 "http://$GITEA_IP:3000/api/healthz" 2>&1 || echo "Cannot connect to Gitea at $GITEA_IP:3000"
      register: traefik_connect
      ignore_errors: yes
      failed_when: false
      when: gitea_ip.stdout is defined and gitea_ip.stdout != ""

    - name: Display connectivity result
      debug:
        msg: "{{ traefik_connect.stdout_lines }}"
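For context, the dynamic/gitea.yml file inspected above is a Traefik file-provider fragment. A rough sketch of what such a file typically contains (router and service names, entrypoint, and certificate resolver are illustrative; the hostname and the gitea:3000 upstream come from the playbooks above):

    http:
      routers:
        gitea:
          rule: "Host(`git.michaelschiemer.de`)"
          entryPoints:
            - websecure            # assumed entrypoint name
          service: gitea
          tls:
            certResolver: letsencrypt   # assumed resolver name
      services:
        gitea:
          loadBalancer:
            servers:
              - url: "http://gitea:3000"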
@@ -50,21 +50,34 @@
         group: "{{ ansible_user }}"
         mode: '0755'
 
-    - name: Check if docker-compose.yml exists in application stack
+    - name: Check if docker-compose.base.yml exists in application stack
       stat:
-        path: "{{ app_stack_path }}/docker-compose.yml"
-      register: compose_file_exists
+        path: "{{ app_stack_path }}/docker-compose.base.yml"
+      register: compose_base_exists
+      when: not (application_sync_files | default(false) | bool)
+
+    - name: Check if docker-compose.production.yml exists in application stack
+      stat:
+        path: "{{ app_stack_path }}/docker-compose.production.yml"
+      register: compose_prod_exists
+      when: not (application_sync_files | default(false) | bool)
 
-    - name: Fail if docker-compose.yml doesn't exist
+    - name: Fail if docker-compose files don't exist
       fail:
         msg: |
-          Application Stack docker-compose.yml not found at {{ app_stack_path }}/docker-compose.yml
+          Application Stack docker-compose files not found at {{ app_stack_path }}
+
+          Required files:
+          - docker-compose.base.yml
+          - docker-compose.production.yml
 
           The Application Stack must be deployed first via:
           ansible-playbook -i inventory/production.yml playbooks/setup-infrastructure.yml
 
-          This will create the application stack with docker-compose.yml and .env file.
-      when: not compose_file_exists.stat.exists
+          This will create the application stack with docker-compose files and .env file.
+      when:
+        - not (application_sync_files | default(false) | bool)
+        - (not compose_base_exists.stat.exists or not compose_prod_exists.stat.exists)
 
     - name: Create backup directory
       file:
@@ -75,31 +88,47 @@
         mode: '0755'
 
   tasks:
-    - name: Verify docker-compose.yml exists
+    - name: Verify docker-compose files exist
       stat:
-        path: "{{ app_stack_path }}/docker-compose.yml"
-      register: compose_file_check
+        path: "{{ app_stack_path }}/docker-compose.base.yml"
+      register: compose_base_check
+      when: not (application_sync_files | default(false) | bool)
+
+    - name: Verify docker-compose.production.yml exists
+      stat:
+        path: "{{ app_stack_path }}/docker-compose.production.yml"
+      register: compose_prod_check
+      when: not (application_sync_files | default(false) | bool)
 
-    - name: Fail if docker-compose.yml doesn't exist
+    - name: Fail if docker-compose files don't exist
       fail:
         msg: |
-          Application Stack docker-compose.yml not found at {{ app_stack_path }}/docker-compose.yml
+          Application Stack docker-compose files not found at {{ app_stack_path }}
+
+          Required files:
+          - docker-compose.base.yml
+          - docker-compose.production.yml
 
           The Application Stack must be deployed first via:
           ansible-playbook -i inventory/production.yml playbooks/setup-infrastructure.yml
 
-          This will create the application stack with docker-compose.yml and .env file.
-      when: not compose_file_check.stat.exists
+          This will create the application stack with docker-compose files and .env file.
+      when:
+        - not (application_sync_files | default(false) | bool)
+        - (not compose_base_check.stat.exists or not compose_prod_check.stat.exists)
 
     - name: Backup current deployment metadata
       shell: |
-        docker compose -f {{ app_stack_path }}/docker-compose.yml ps --format json 2>/dev/null > {{ backups_path }}/{{ deployment_timestamp | regex_replace(':', '-') }}/current_containers.json || true
-        docker compose -f {{ app_stack_path }}/docker-compose.yml config 2>/dev/null > {{ backups_path }}/{{ deployment_timestamp | regex_replace(':', '-') }}/docker-compose-config.yml || true
+        docker compose -f {{ app_stack_path }}/docker-compose.base.yml -f {{ app_stack_path }}/docker-compose.production.yml ps --format json 2>/dev/null > {{ backups_path }}/{{ deployment_timestamp | regex_replace(':', '-') }}/current_containers.json || true
+        docker compose -f {{ app_stack_path }}/docker-compose.base.yml -f {{ app_stack_path }}/docker-compose.production.yml config 2>/dev/null > {{ backups_path }}/{{ deployment_timestamp | regex_replace(':', '-') }}/docker-compose-config.yml || true
       args:
         executable: /bin/bash
       changed_when: false
       ignore_errors: yes
-      when: compose_file_check.stat.exists
+      when:
+        - not (application_sync_files | default(false) | bool)
+        - compose_base_exists.stat.exists | default(false)
+        - compose_prod_exists.stat.exists | default(false)
 
     - name: Login to Docker registry (if credentials provided)
       community.docker.docker_login:
@@ -128,9 +157,19 @@
         msg: "Failed to pull image {{ app_image }}:{{ image_tag }}"
       when: image_pull.failed
 
-    - name: Update docker-compose.yml with new image tag (all services)
+    # Sync files first if application_sync_files=true (before updating docker-compose.production.yml)
+    - name: Sync application stack files
+      import_role:
+        name: application
+      vars:
+        application_sync_files: "{{ application_sync_files | default(false) }}"
+        application_compose_recreate: "never"  # Don't recreate yet, just sync files
+        application_remove_orphans: false
+      when: application_sync_files | default(false) | bool
+
+    - name: Update docker-compose.production.yml with new image tag (all services)
       replace:
-        path: "{{ app_stack_path }}/docker-compose.yml"
+        path: "{{ app_stack_path }}/docker-compose.production.yml"
         # Match both localhost:5000 and registry.michaelschiemer.de (or any registry URL)
         regexp: '^(\s+image:\s+)(localhost:5000|registry\.michaelschiemer\.de|{{ docker_registry }})/{{ app_name }}:.*$'
         replace: '\1{{ app_image }}:{{ image_tag }}'
@@ -142,13 +181,13 @@
       import_role:
         name: application
       vars:
-        application_sync_files: false
+        application_sync_files: false  # Already synced above, don't sync again
         application_compose_recreate: "always"
         application_remove_orphans: true
 
     - name: Get deployed image information
       shell: |
-        docker compose -f {{ app_stack_path }}/docker-compose.yml config | grep -E "^\s+image:" | head -1 | awk '{print $2}' || echo "unknown"
+        docker compose -f {{ app_stack_path }}/docker-compose.base.yml -f {{ app_stack_path }}/docker-compose.production.yml config | grep -E "^\s+image:" | head -1 | awk '{print $2}' || echo "unknown"
       args:
         executable: /bin/bash
       register: deployed_image
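For reference, a hypothetical before/after of an image line rewritten by the replace task above (registry, app name, and tags are illustrative; the capture group keeps the indentation and "image:" prefix, and the rest of the line becomes app_image:image_tag):

    # docker-compose.production.yml, before
        image: registry.michaelschiemer.de/myapp:20240101-oldtag
    # after the replace task runs with app_image=registry.michaelschiemer.de/myapp and image_tag=20240214-abc123
        image: registry.michaelschiemer.de/myapp:20240214-abc123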
@@ -0,0 +1,143 @@
---
- name: Fix Gitea Traefik Configuration - Remove Dynamic Config and Use Labels
  hosts: production
  gather_facts: yes
  become: no

  vars:
    traefik_stack_path: "{{ stacks_base_path }}/traefik"
    gitea_stack_path: "{{ stacks_base_path }}/gitea"

  tasks:
    - name: Backup dynamic Gitea configuration
      shell: |
        cd {{ traefik_stack_path }}/dynamic
        if [ -f gitea.yml ]; then
          cp gitea.yml gitea.yml.backup-$(date +%Y%m%d-%H%M%S)
          echo "Backed up to gitea.yml.backup-$(date +%Y%m%d-%H%M%S)"
        else
          echo "File not found, nothing to backup"
        fi
      args:
        executable: /bin/bash
      register: backup_result
      ignore_errors: yes
      failed_when: false

    - name: Display backup result
      debug:
        msg: "{{ backup_result.stdout_lines }}"

    - name: Remove dynamic Gitea configuration
      file:
        path: "{{ traefik_stack_path }}/dynamic/gitea.yml"
        state: absent
      register: remove_config

    - name: Restart Traefik to reload configuration
      community.docker.docker_compose_v2:
        project_src: "{{ traefik_stack_path }}"
        state: present
        pull: never
        recreate: always
        services:
          - traefik
      register: traefik_restart
      when: remove_config.changed

    - name: Wait for Traefik to be ready
      wait_for:
        port: 443
        host: localhost
        timeout: 30
      delegate_to: localhost
      when: traefik_restart.changed
      ignore_errors: yes

    - name: Check if Gitea docker-compose.yml already has Traefik labels
      shell: |
        grep -q "traefik.enable=true" {{ gitea_stack_path }}/docker-compose.yml && echo "Labels already present" || echo "Labels missing"
      register: labels_check
      ignore_errors: yes
      failed_when: false

    - name: Copy docker-compose.yml from local to ensure labels are present
      copy:
        src: "{{ playbook_dir }}/../../stacks/gitea/docker-compose.yml"
        dest: "{{ gitea_stack_path }}/docker-compose.yml"
        owner: "{{ ansible_user }}"
        group: "{{ ansible_user }}"
        mode: '0644'
      register: labels_added
      when: "'Labels missing' in labels_check.stdout"

    - name: Recreate Gitea container with labels
      community.docker.docker_compose_v2:
        project_src: "{{ gitea_stack_path }}"
        state: present
        pull: never
        recreate: always
        remove_orphans: no
      register: gitea_recreate
      when: labels_added.changed

    - name: Wait for Gitea to be healthy
      shell: |
        for i in {1..30}; do
          if docker exec gitea curl -f http://localhost:3000/api/healthz >/dev/null 2>&1; then
            echo "Gitea is healthy"
            exit 0
          fi
          echo "Waiting for Gitea... ($i/30)"
          sleep 2
        done
        echo "Health check timeout"
        exit 1
      args:
        executable: /bin/bash
      register: health_wait
      ignore_errors: yes
      failed_when: false
      when: gitea_recreate.changed

    - name: Display health wait result
      debug:
        msg: "{{ health_wait.stdout_lines }}"
      when: gitea_recreate.changed

    - name: Check Traefik service registration
      shell: |
        sleep 5  # Give Traefik time to discover
        docker exec traefik wget -qO- http://localhost:8080/api/http/services 2>&1 | grep -i gitea || echo "Service not found (may take a few seconds)"
      register: traefik_service
      ignore_errors: yes
      failed_when: false

    - name: Display Traefik service registration
      debug:
        msg: "{{ traefik_service.stdout_lines }}"

    - name: Test external Gitea access
      shell: |
        sleep 3  # Give Traefik time to update routing
        curl -k -H "User-Agent: Mozilla/5.0" -s -o /dev/null -w "HTTP Status: %{http_code}\n" https://git.michaelschiemer.de/ 2>&1 || echo "Connection failed"
      args:
        executable: /bin/bash
      register: external_test
      ignore_errors: yes
      failed_when: false

    - name: Display external test result
      debug:
        msg: "{{ external_test.stdout_lines }}"

    - name: Summary
      debug:
        msg:
          - "=== FIX SUMMARY ==="
          - "Dynamic config removed: {{ 'Yes' if remove_config.changed else 'Already removed' }}"
          - "Labels added to docker-compose.yml: {{ 'Yes' if labels_added.changed else 'Already present' }}"
          - "Gitea container recreated: {{ 'Yes' if gitea_recreate.changed else 'No' }}"
          - ""
          - "Gitea should now be accessible via https://git.michaelschiemer.de"
          - "If issue persists, check Traefik logs for errors"
deployment/ansible/playbooks/fix-gitea-traefik-labels.yml (new file, 139 lines)
@@ -0,0 +1,139 @@
---
- name: Fix Gitea Traefik Labels
  hosts: production
  gather_facts: yes
  become: no

  vars:
    gitea_stack_path: "{{ stacks_base_path }}/gitea"

  tasks:
    - name: Check current Gitea container status
      shell: |
        cd {{ gitea_stack_path }}
        docker compose ps gitea
      args:
        executable: /bin/bash
      register: gitea_status_before
      ignore_errors: yes
      failed_when: false

    - name: Display current status
      debug:
        msg: "{{ gitea_status_before.stdout_lines }}"

    - name: Check current Traefik labels
      shell: |
        # Print the container labels as JSON and filter for Traefik-related entries
        docker inspect gitea --format '{{ '{{' }} json .Config.Labels {{ '}}' }}' 2>&1 | grep -i traefik || echo "No Traefik labels found"
      register: current_labels
      ignore_errors: yes
      failed_when: false

    - name: Display current labels
      debug:
        msg: "{{ current_labels.stdout_lines }}"

    - name: Recreate Gitea container with Traefik labels
      community.docker.docker_compose_v2:
        project_src: "{{ gitea_stack_path }}"
        state: present
        pull: never
        recreate: always
        remove_orphans: no
      register: gitea_recreate

    - name: Wait for Gitea to be ready
      wait_for:
        port: 3000
        host: localhost
        timeout: 60
      delegate_to: localhost
      when: gitea_recreate.changed
      ignore_errors: yes

    - name: Wait for Gitea health check
      shell: |
        for i in {1..30}; do
          if docker exec gitea curl -f http://localhost:3000/api/healthz >/dev/null 2>&1; then
            echo "Gitea is healthy"
            exit 0
          fi
          echo "Waiting for Gitea to be healthy... ($i/30)"
          sleep 2
        done
        echo "Gitea health check timeout"
        exit 1
      args:
        executable: /bin/bash
      register: health_wait
      ignore_errors: yes
      failed_when: false
      when: gitea_recreate.changed

    - name: Display health wait result
      debug:
        msg: "{{ health_wait.stdout_lines }}"
      when: gitea_recreate.changed

    - name: Check new Gitea container status
      shell: |
        cd {{ gitea_stack_path }}
        docker compose ps gitea
      args:
        executable: /bin/bash
      register: gitea_status_after
      ignore_errors: yes
      failed_when: false

    - name: Display new status
      debug:
        msg: "{{ gitea_status_after.stdout_lines }}"

    - name: Check new Traefik labels
      shell: |
        # Print the container labels as JSON and filter for Traefik-related entries
        docker inspect gitea --format '{{ '{{' }} json .Config.Labels {{ '}}' }}' 2>&1 | grep -i traefik || echo "No Traefik labels found"
      register: new_labels
      ignore_errors: yes
      failed_when: false

    - name: Display new labels
      debug:
        msg: "{{ new_labels.stdout_lines }}"

    - name: Check Traefik service registration
      shell: |
        docker exec traefik wget -qO- http://localhost:8080/api/http/services 2>&1 | grep -i gitea || echo "Gitea service not found (may take a few seconds to register)"
      register: traefik_service
      ignore_errors: yes
      failed_when: false

    - name: Display Traefik service registration
      debug:
        msg: "{{ traefik_service.stdout_lines }}"

    - name: Test external Gitea access
      shell: |
        echo "Testing external access..."
        sleep 5  # Give Traefik time to update
        curl -k -H "User-Agent: Mozilla/5.0" -s -o /dev/null -w "HTTP Status: %{http_code}\n" https://git.michaelschiemer.de/ 2>&1 || echo "Connection failed"
      args:
        executable: /bin/bash
      register: external_test
      ignore_errors: yes
      failed_when: false

    - name: Display external test result
      debug:
        msg: "{{ external_test.stdout_lines }}"

    - name: Summary
      debug:
        msg:
          - "=== FIX SUMMARY ==="
          - "Container recreated: {{ 'Yes' if gitea_recreate.changed else 'No' }}"
          - "Traefik labels: {{ 'Fixed' if 'traefik' in new_labels.stdout|lower else 'Still missing' }}"
          - ""
          - "If the issue persists:"
          - "1. Check Traefik logs: cd {{ stacks_base_path }}/traefik && docker compose logs traefik"
          - "2. Verify Traefik can reach Gitea: docker exec traefik ping -c 2 gitea"
          - "3. Check Gitea logs for errors: cd {{ gitea_stack_path }} && docker compose logs gitea"
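The fix playbooks above expect the Gitea service to publish its routing via container labels on the traefik-public network instead of a dynamic file. A rough sketch of such a service definition, assuming standard Traefik v2 label names; the router/service name, entrypoint, and certificate resolver are illustrative, while the hostname, port 3000, traefik.enable=true, and the traefik-public network come from the playbooks above:

    services:
      gitea:
        image: gitea/gitea:1.21   # illustrative tag
        networks:
          - traefik-public
        labels:
          - "traefik.enable=true"
          - "traefik.http.routers.gitea.rule=Host(`git.michaelschiemer.de`)"
          - "traefik.http.routers.gitea.entrypoints=websecure"          # assumed entrypoint name
          - "traefik.http.routers.gitea.tls.certresolver=letsencrypt"   # assumed resolver name
          - "traefik.http.services.gitea.loadbalancer.server.port=3000"
          - "traefik.docker.network=traefik-public"

    networks:
      traefik-public:
        external: true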
@@ -10,6 +10,7 @@
     wireguard_config_file: "{{ wireguard_config_path }}/{{ wireguard_interface }}.conf"
     wireguard_client_configs_path: "/etc/wireguard/clients"
     wireguard_local_client_configs_dir: "{{ playbook_dir }}/../wireguard-clients"
+    wireguard_dns_servers: []
 
   tasks:
     - name: Validate client name
@@ -80,18 +81,19 @@
    - name: Extract server IP from config
      set_fact:
        server_vpn_ip: "{{ (wireguard_server_config_read.content | b64decode | regex_search('Address = ([0-9.]+)')) | default(['10.8.0.1']) | first }}"
        server_vpn_ip: "{{ (wireguard_server_config_read.content | b64decode | regex_search('Address = ([0-9.]+)', '\\\\1')) | first | default('10.8.0.1') }}"
      failed_when: false

    - name: Set default DNS servers
      set_fact:
        wireguard_dns_servers: "{{ [server_vpn_ip] }}"

    - name: Extract WireGuard server IP octets
      set_fact:
        wireguard_server_ip_octets: "{{ server_vpn_ip.split('.') }}"
        wireguard_server_ip_octets: "{{ (server_vpn_ip | default('')).split('.') }}"
      when: client_ip == ""

    - name: Fail if server VPN IP is invalid
      fail:
        msg: "Server VPN IP '{{ server_vpn_ip }}' is invalid - please check wg0.conf."
      when: client_ip == "" and (wireguard_server_ip_octets | length) < 4

    - name: Gather existing client addresses
      set_fact:
        existing_client_ips: "{{ (wireguard_server_config_read.content | b64decode | regex_findall('AllowedIPs = ([0-9A-Za-z.]+)/32', '\\\\1')) }}"
@@ -109,7 +111,7 @@
           wireguard_server_ip_octets[2],
           next_octet_candidate
         ] | join('.') }}"
-      when: client_ip == ""
+      when: client_ip == "" and (wireguard_server_ip_octets | length) >= 4
 
     - name: Generate NEW client private key
       command: "wg genkey"
 
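The capture-group extraction changed above can be checked in isolation. A small hedged example of what the expression evaluates to for a typical wg0.conf Interface section (the sample address is illustrative):

    # Given a decoded config containing the line "Address = 10.8.0.1/24",
    # regex_search('Address = ([0-9.]+)', '\\1') returns the requested capture group(s) as a list,
    # e.g. ['10.8.0.1'], and '| first' then yields the plain address string.
    - name: Show extracted server VPN IP (illustrative)
      debug:
        msg: "{{ ('Address = 10.8.0.1/24' | regex_search('Address = ([0-9.]+)', '\\1')) | first | default('10.8.0.1') }}"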
@@ -35,45 +35,37 @@
        file: "{{ vault_file }}"
      no_log: yes

    - name: Ensure secrets directory exists
    - name: Ensure secrets directory exists for Docker Compose secrets
      file:
        path: "{{ secrets_path }}"
        path: "{{ app_stack_path }}/secrets"
        state: directory
        owner: "{{ ansible_user }}"
        group: "{{ ansible_user }}"
        mode: '0700'

    - name: Create .env.production file
      template:
        src: "{{ playbook_dir }}/../templates/.env.production.j2"
        dest: "{{ secrets_path }}/.env.production"
    - name: Create Docker Compose secret files from vault
      copy:
        content: "{{ item.value }}"
        dest: "{{ app_stack_path }}/secrets/{{ item.name }}.txt"
        owner: "{{ ansible_user }}"
        group: "{{ ansible_user }}"
        mode: '0600'
      no_log: yes

    - name: Create Docker secrets from vault (disabled for compose-only deployment)
      docker_secret:
        name: "{{ item.name }}"
        data: "{{ item.value }}"
        state: present
      loop:
        - name: db_password
        - name: db_user_password
          value: "{{ vault_db_password }}"
        - name: redis_password
          value: "{{ vault_redis_password }}"
        - name: app_key
          value: "{{ vault_app_key }}"
        - name: jwt_secret
          value: "{{ vault_jwt_secret }}"
        - name: mail_password
          value: "{{ vault_mail_password }}"
        - name: vault_encryption_key
          value: "{{ vault_encryption_key | default(vault_app_key) }}"
        - name: git_token
          value: "{{ vault_git_token | default('') }}"
      no_log: yes
      when: false

    - name: Set secure permissions on secrets directory
      file:
        path: "{{ secrets_path }}"
        path: "{{ app_stack_path }}/secrets"
        state: directory
        owner: "{{ ansible_user }}"
        group: "{{ ansible_user }}"
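The change above writes each vault value into {{ app_stack_path }}/secrets/<name>.txt so the stack can consume them as file-based Docker Compose secrets. A rough sketch of the compose side, assuming the standard top-level secrets syntax; the service name and image are illustrative, the secret names come from the loop above:

    services:
      app:
        image: registry.michaelschiemer.de/myapp:latest   # illustrative
        secrets:
          - db_user_password
          - app_key

    secrets:
      db_user_password:
        file: ./secrets/db_user_password.txt
      app_key:
        file: ./secrets/app_key.txt

    # Inside the container the values are then available under /run/secrets/<name>.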