feat: update deployment configuration and encrypted env loader
- Update Ansible playbooks and roles for application deployment
- Add new Gitea/Traefik troubleshooting playbooks
- Update Docker Compose configurations (base, local, staging, production)
- Enhance EncryptedEnvLoader with improved error handling
- Add deployment scripts (autossh setup, migration, secret testing)
- Update CI/CD workflows and documentation
- Add Semaphore stack configuration
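Most of the deployment changes below converge on one pattern: a shared docker-compose.base.yml combined with an environment overlay (docker-compose.staging.yml or docker-compose.production.yml). A minimal sketch of that layered invocation, with service details omitted and the validation step added as an assumption:

```bash
#!/usr/bin/env bash
# Sketch: layered Compose deployment as used throughout the workflow changes below.
# Later -f files extend/override earlier ones, so the overlay wins on conflicts.
set -euo pipefail

BASE="docker-compose.base.yml"
OVERLAY="docker-compose.staging.yml"   # or docker-compose.production.yml

docker compose -f "$BASE" -f "$OVERLAY" config >/dev/null   # validate merged config (extra step, not in the workflow)
docker compose -f "$BASE" -f "$OVERLAY" up -d --pull missing --force-recreate
docker compose -f "$BASE" -f "$OVERLAY" ps
```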
@@ -123,12 +123,22 @@ jobs:
fi

if [ -z "$CHANGED_FILES" ] && [ "$FORCE" != "true" ]; then
# No diff information available; fall back to building to stay safe
echo "⚠️ No change information found - building image to be safe."
echo "needs_build=true" >> "$GITHUB_OUTPUT"
echo "changed_files=<none>" >> "$GITHUB_OUTPUT"
echo "needs_runtime_build=true" >> "$GITHUB_OUTPUT"
exit 0
# No diff information available; assume no build needed if this is not initial commit
# Only skip if we can detect this is not the first commit
if git rev-parse HEAD^ >/dev/null 2>&1; then
echo "ℹ️ No change information found, but HEAD^ exists - skipping build."
echo "needs_build=false" >> "$GITHUB_OUTPUT"
echo "changed_files=<none>" >> "$GITHUB_OUTPUT"
echo "needs_runtime_build=false" >> "$GITHUB_OUTPUT"
exit 0
else
# First commit or detached state - build to be safe
echo "⚠️ No change information found - building image to be safe."
echo "needs_build=true" >> "$GITHUB_OUTPUT"
echo "changed_files=<none>" >> "$GITHUB_OUTPUT"
echo "needs_runtime_build=true" >> "$GITHUB_OUTPUT"
exit 0
fi
fi

NEEDS_BUILD=true
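The added branch writes its decision to $GITHUB_OUTPUT, where downstream jobs (for example the runtime-base job gated on needs.changes.outputs.needs_runtime_build further below) pick it up. A compact sketch of the same fallback, runnable outside CI by pointing GITHUB_OUTPUT at a temp file; variable defaults are assumptions:

```bash
#!/usr/bin/env bash
# Sketch: the "no change information" fallback, runnable locally.
set -euo pipefail

GITHUB_OUTPUT="${GITHUB_OUTPUT:-/tmp/github_output}"   # CI provides this; fake it locally
CHANGED_FILES="${CHANGED_FILES:-}"                     # empty = no diff information available
FORCE="${FORCE:-false}"

if [ -z "$CHANGED_FILES" ] && [ "$FORCE" != "true" ]; then
  if git rev-parse HEAD^ >/dev/null 2>&1; then
    # A parent commit exists: treat missing diff info as "nothing to rebuild".
    echo "needs_build=false" >> "$GITHUB_OUTPUT"
    echo "needs_runtime_build=false" >> "$GITHUB_OUTPUT"
  else
    # First commit or detached state: build to be safe.
    echo "needs_build=true" >> "$GITHUB_OUTPUT"
    echo "needs_runtime_build=true" >> "$GITHUB_OUTPUT"
  fi
fi
cat "$GITHUB_OUTPUT"
```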
@@ -160,6 +170,8 @@ jobs:
SUMMARY="Only docs/test changes - container build will be skipped"
elif [ "$NEEDS_BUILD" = "false" ] && [ "$OTHER_NON_IGNORED" = "true" ]; then
SUMMARY="No build triggers found - container build will be skipped"
elif [ "$NEEDS_BUILD" = "true" ]; then
SUMMARY="Runtime-relevant changes detected - container build will run"
fi
else
RUNTIME_BUILD=true
@@ -187,7 +199,7 @@ jobs:
runtime-base:
name: Build Runtime Base Image
needs: changes
if: always()
if: needs.changes.outputs.needs_runtime_build == 'true'
runs-on: docker-build
outputs:
image_ref: ${{ steps.set-result.outputs.image_ref }}
@@ -396,6 +408,7 @@ jobs:
echo "image_ref=$TARGET_REGISTRY/$RUNTIME_IMAGE_NAME:latest" >> "$GITHUB_OUTPUT"
echo "built=true" >> "$GITHUB_OUTPUT"
else
# When runtime build is skipped, output empty but build job will use default latest image
echo "image_ref=" >> "$GITHUB_OUTPUT"
echo "built=false" >> "$GITHUB_OUTPUT"
fi
@@ -727,6 +740,24 @@ jobs:
echo " Image: $IMAGE_NAME"
echo " Tags: latest, $TAG, git-$SHORT_SHA"

# Build cache sources - branch-specific and general caches
CACHE_SOURCES=(
"type=registry,ref=${CACHE_TARGET}/${IMAGE_NAME}:buildcache"
"type=registry,ref=${REGISTRY_TO_USE}/${IMAGE_NAME}:latest"
"type=registry,ref=${REGISTRY_TO_USE}/${IMAGE_NAME}:${REF_NAME}-cache"
)

# If this is not the first build, try to use previous commit's tag as cache
if git rev-parse HEAD^ >/dev/null 2>&1; then
PREV_SHORT_SHA=$(git rev-parse --short=7 HEAD^)
CACHE_SOURCES+=("type=registry,ref=${REGISTRY_TO_USE}/${IMAGE_NAME}:git-${PREV_SHORT_SHA}")
fi

CACHE_FROM_ARGS=""
for CACHE_SRC in "${CACHE_SOURCES[@]}"; do
CACHE_FROM_ARGS="${CACHE_FROM_ARGS} --cache-from ${CACHE_SRC}"
done

docker buildx build \
--platform linux/amd64 \
--file ./Dockerfile.production \
@@ -734,9 +765,9 @@ jobs:
--tag "${REGISTRY_TO_USE}/${IMAGE_NAME}:latest" \
--tag "${REGISTRY_TO_USE}/${IMAGE_NAME}:${TAG}" \
--tag "${REGISTRY_TO_USE}/${IMAGE_NAME}:git-${SHORT_SHA}" \
--cache-from type=registry,ref="${CACHE_TARGET}/${IMAGE_NAME}:buildcache" \
--cache-from type=registry,ref="${REGISTRY_TO_USE}/${IMAGE_NAME}:latest" \
${CACHE_FROM_ARGS} \
--cache-to type=registry,ref="${CACHE_TARGET}/${IMAGE_NAME}:buildcache",mode=max \
--cache-to type=registry,ref="${REGISTRY_TO_USE}/${IMAGE_NAME}:${REF_NAME}-cache",mode=max \
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
--build-arg GIT_COMMIT=${COMMIT_SHA} \
--build-arg GIT_BRANCH=${REF_NAME} \
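The CACHE_SOURCES array and the loop above do nothing more than emit one --cache-from flag per registry ref; the unquoted ${CACHE_FROM_ARGS} in the buildx call then relies on word splitting. A self-contained sketch with placeholder refs:

```bash
#!/usr/bin/env bash
# Sketch: how the CACHE_FROM_ARGS loop expands into repeated --cache-from flags.
set -euo pipefail

CACHE_SOURCES=(
  "type=registry,ref=registry.example.com/app:buildcache"
  "type=registry,ref=registry.example.com/app:latest"
  "type=registry,ref=registry.example.com/app:staging-cache"
)

CACHE_FROM_ARGS=""
for CACHE_SRC in "${CACHE_SOURCES[@]}"; do
  CACHE_FROM_ARGS="${CACHE_FROM_ARGS} --cache-from ${CACHE_SRC}"
done

# Prints one --cache-from per entry, in order:
echo "docker buildx build${CACHE_FROM_ARGS} ..."
```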
@@ -787,7 +818,9 @@ jobs:
deploy-staging:
name: Auto-deploy to Staging
needs: [changes, build, runtime-base]
if: github.ref_name == 'staging' || github.head_ref == 'staging' || (github.ref_name == '' && contains(github.ref, 'staging'))
if: |
(github.ref_name == 'staging' || github.head_ref == 'staging' || (github.ref_name == '' && contains(github.ref, 'staging'))) &&
(needs.build.result == 'success' || needs.build.result == 'skipped')
runs-on: ubuntu-latest
environment:
name: staging
@@ -952,21 +985,29 @@ jobs:
fi
fi

# If docker-compose.yml doesn't exist, it will be created from repo
if [ ! -f docker-compose.yml ]; then
echo "⚠️ docker-compose.yml not found, copying from repo..."
cp /workspace/repo/deployment/stacks/staging/docker-compose.yml . || {
echo "❌ Failed to copy docker-compose.yml"
# Copy base and staging docker-compose files if they don't exist
if [ ! -f docker-compose.base.yml ]; then
echo "⚠️ docker-compose.base.yml not found, copying from repo..."
cp /workspace/repo/docker-compose.base.yml . || {
echo "❌ Failed to copy docker-compose.base.yml"
exit 1
}
fi

# Update docker-compose.yml with new image tag
echo "📝 Updating docker-compose.yml..."
sed -i "s|image:.*/${IMAGE_NAME}:.*|image: ${DEPLOY_IMAGE}|g" docker-compose.yml
if [ ! -f docker-compose.staging.yml ]; then
echo "⚠️ docker-compose.staging.yml not found, copying from repo..."
cp /workspace/repo/docker-compose.staging.yml . || {
echo "❌ Failed to copy docker-compose.staging.yml"
exit 1
}
fi

echo "✅ Updated docker-compose.yml:"
grep "image:" docker-compose.yml | head -5
# Update docker-compose.staging.yml with new image tag
echo "📝 Updating docker-compose.staging.yml with new image tag..."
sed -i "s|image:.*/${IMAGE_NAME}:.*|image: ${DEPLOY_IMAGE}|g" docker-compose.staging.yml

echo "✅ Updated docker-compose.staging.yml:"
grep "image:" docker-compose.staging.yml | head -5

# Ensure networks exist
echo "🔗 Ensuring Docker networks exist..."
@@ -974,7 +1015,8 @@ jobs:
docker network create staging-internal 2>/dev/null || true

echo "🔄 Starting/updating services..."
docker compose up -d --pull always --force-recreate || {
# Use --pull missing instead of --pull always since we already pulled the specific image
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml up -d --pull missing --force-recreate || {
echo "❌ Failed to start services"
exit 1
}
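The switch from --pull always to --pull missing works because the deploy step has already pulled the exact image referenced in the compose files; Compose then only fetches images it does not have locally. A hedged sketch of that two-step pattern (image tag is a placeholder):

```bash
#!/usr/bin/env bash
# Sketch: pull the pinned image explicitly, then let Compose reuse it.
set -euo pipefail

DEPLOY_IMAGE="registry.example.com/app:git-abc1234"   # placeholder tag

# 1. Pull the exact tag that was just built.
docker pull "$DEPLOY_IMAGE"

# 2. --pull missing: services referencing $DEPLOY_IMAGE reuse the local copy,
#    while any other images are pulled only if they are absent.
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml \
  up -d --pull missing --force-recreate
```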
@@ -982,27 +1024,32 @@ jobs:
echo "⏳ Waiting for services to start..."
sleep 15

# Force containers to pull latest code from Git repository
echo "🔄 Pulling latest code from Git repository in staging-app container..."
docker compose exec -T staging-app bash -c "cd /var/www/html && git -c safe.directory=/var/www/html fetch origin staging && git -c safe.directory=/var/www/html reset --hard origin/staging && git -c safe.directory=/var/www/html clean -fd" || echo "⚠️ Git pull failed, container will sync on next restart"
# Pull latest code from Git repository only if image was actually rebuilt
# Skip if build was skipped (no changes detected) - container already has latest code
if [ "${{ needs.build.result }}" = "success" ] && [ -n "${{ needs.build.outputs.image_url }}" ] && [ "${{ needs.build.outputs.image_url }}" != "null" ]; then
echo "🔄 Pulling latest code from Git repository in staging-app container (image was rebuilt)..."
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml exec -T staging-app bash -c "cd /var/www/html && git -c safe.directory=/var/www/html fetch origin staging && git -c safe.directory=/var/www/html reset --hard origin/staging && git -c safe.directory=/var/www/html clean -fd" || echo "⚠️ Git pull failed, container will sync on next restart"
else
echo "ℹ️ Skipping Git pull - no new image built, container already has latest code"
fi

# Also trigger a restart to ensure entrypoint script runs
echo "🔄 Restarting staging-app to ensure all services are up-to-date..."
docker compose restart staging-app || echo "⚠️ Failed to restart staging-app"
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml restart staging-app || echo "⚠️ Failed to restart staging-app"

# Fix nginx upstream configuration - critical fix for 502 errors
# sites-available/default uses 127.0.0.1:9000 but PHP-FPM runs in staging-app container
echo "🔧 Fixing nginx PHP-FPM upstream configuration (post-deploy fix)..."
sleep 5
docker compose exec -T staging-nginx sed -i '/upstream php-upstream {/,/}/s|server 127.0.0.1:9000;|server staging-app:9000;|g' /etc/nginx/sites-available/default || echo "⚠️ Upstream fix (127.0.0.1) failed"
docker compose exec -T staging-nginx sed -i '/upstream php-upstream {/,/}/s|server localhost:9000;|server staging-app:9000;|g' /etc/nginx/sites-available/default || echo "⚠️ Upstream fix (localhost) failed"
docker compose exec -T staging-nginx nginx -t && docker compose restart staging-nginx || echo "⚠️ Nginx config test or restart failed"
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml exec -T staging-nginx sed -i '/upstream php-upstream {/,/}/s|server 127.0.0.1:9000;|server staging-app:9000;|g' /etc/nginx/sites-available/default || echo "⚠️ Upstream fix (127.0.0.1) failed"
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml exec -T staging-nginx sed -i '/upstream php-upstream {/,/}/s|server localhost:9000;|server staging-app:9000;|g' /etc/nginx/sites-available/default || echo "⚠️ Upstream fix (localhost) failed"
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml exec -T staging-nginx nginx -t && docker compose -f docker-compose.base.yml -f docker-compose.staging.yml restart staging-nginx || echo "⚠️ Nginx config test or restart failed"
echo "✅ Nginx configuration fixed and reloaded"

echo "⏳ Waiting for services to stabilize..."
sleep 10
echo "📊 Container status:"
docker compose ps
docker compose -f docker-compose.base.yml -f docker-compose.staging.yml ps

echo "✅ Staging deployment completed!"
EOF
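The post-deploy nginx fix exists because the shipped sites-available/default points the php-upstream block at 127.0.0.1:9000, while PHP-FPM actually listens in the staging-app container; without the rewrite nginx answers with 502. The same fix as a standalone sketch (container and path names taken from the diff):

```bash
#!/usr/bin/env bash
# Sketch: rewrite the php-upstream server entry inside the nginx container, then verify.
set -euo pipefail

COMPOSE="docker compose -f docker-compose.base.yml -f docker-compose.staging.yml"

# Only lines between "upstream php-upstream {" and the closing "}" are edited.
$COMPOSE exec -T staging-nginx \
  sed -i '/upstream php-upstream {/,/}/s|server 127.0.0.1:9000;|server staging-app:9000;|g' \
  /etc/nginx/sites-available/default

# Validate the config first; only restart nginx if the test passes.
$COMPOSE exec -T staging-nginx nginx -t && $COMPOSE restart staging-nginx
```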
@@ -1137,15 +1184,33 @@ jobs:
exit 1
}

echo "📝 Updating docker-compose.yml..."
sed -i "s|image:.*/${IMAGE_NAME}:.*|image: ${FULL_IMAGE}|g" docker-compose.yml
sed -i "s|image:.*/${IMAGE_NAME}@.*|image: ${FULL_IMAGE}|g" docker-compose.yml
# Copy base and production docker-compose files if they don't exist
if [ ! -f docker-compose.base.yml ]; then
echo "⚠️ docker-compose.base.yml not found, copying from repo..."
cp /workspace/repo/docker-compose.base.yml . || {
echo "❌ Failed to copy docker-compose.base.yml"
exit 1
}
fi

echo "✅ Updated docker-compose.yml:"
grep "image:" docker-compose.yml | head -5
if [ ! -f docker-compose.production.yml ]; then
echo "⚠️ docker-compose.production.yml not found, copying from repo..."
cp /workspace/repo/docker-compose.production.yml . || {
echo "❌ Failed to copy docker-compose.production.yml"
exit 1
}
fi

echo "📝 Updating docker-compose.production.yml with new image tag..."
sed -i "s|image:.*/${IMAGE_NAME}:.*|image: ${FULL_IMAGE}|g" docker-compose.production.yml
sed -i "s|image:.*/${IMAGE_NAME}@.*|image: ${FULL_IMAGE}|g" docker-compose.production.yml

echo "✅ Updated docker-compose.production.yml:"
grep "image:" docker-compose.production.yml | head -5

echo "🔄 Restarting services..."
docker compose up -d --pull always --force-recreate || {
# Use --pull missing instead of --pull always since we already pulled the specific image
docker compose -f docker-compose.base.yml -f docker-compose.production.yml up -d --pull missing --force-recreate || {
echo "❌ Failed to restart services"
exit 1
}
@@ -1154,7 +1219,7 @@ jobs:
sleep 10

echo "📊 Container status:"
docker compose ps
docker compose -f docker-compose.base.yml -f docker-compose.production.yml ps

echo "✅ Production deployment completed!"
EOF
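The production update applies two sed patterns so that both tag-pinned (image: .../name:tag) and digest-pinned (image: .../name@sha256:...) references end up pointing at the newly built image. A small self-contained demo with made-up file and image names:

```bash
#!/usr/bin/env bash
# Sketch: rewrite both tag- and digest-style image references to the new image.
set -euo pipefail

IMAGE_NAME="app"
FULL_IMAGE="registry.example.com/app:git-abc1234"

cat > /tmp/demo-compose.yml <<'YAML'
services:
  app:
    image: registry.example.com/app:old-tag
  worker:
    image: registry.example.com/app@sha256:0000000000000000000000000000000000000000000000000000000000000000
YAML

# First pattern matches ":tag" references, second matches "@sha256:..." digests.
sed -i "s|image:.*/${IMAGE_NAME}:.*|image: ${FULL_IMAGE}|g" /tmp/demo-compose.yml
sed -i "s|image:.*/${IMAGE_NAME}@.*|image: ${FULL_IMAGE}|g" /tmp/demo-compose.yml

grep "image:" /tmp/demo-compose.yml   # both lines now point at $FULL_IMAGE
```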
@@ -41,6 +41,7 @@ jobs:
fi
echo "target_ref=$TARGET" >> "$GITHUB_OUTPUT"
echo "TARGET_REF=$TARGET" >> $GITHUB_ENV
echo "BRANCH_NAME=$TARGET" >> $GITHUB_ENV

- name: Download CI helpers
shell: bash
@@ -173,14 +174,28 @@ jobs:
IMAGE_NAME="${{ env.RUNTIME_IMAGE_NAME }}"
DATE_TAG="warm-$(date -u +%Y%m%d%H%M)"
BRANCH_NAME="${{ env.BRANCH_NAME || 'main' }}"

# Build cache sources - multiple sources for better cache hit rate
CACHE_SOURCES=(
"type=registry,ref=${TARGET_REGISTRY}/${IMAGE_NAME}:buildcache"
"type=registry,ref=${TARGET_REGISTRY}/${IMAGE_NAME}:${BRANCH_NAME}-cache"
"type=registry,ref=${TARGET_REGISTRY}/${IMAGE_NAME}:latest"
)

CACHE_FROM_ARGS=""
for CACHE_SRC in "${CACHE_SOURCES[@]}"; do
CACHE_FROM_ARGS="${CACHE_FROM_ARGS} --cache-from ${CACHE_SRC}"
done

docker buildx build \
--platform linux/amd64 \
--file ./Dockerfile.production \
--target runtime-base \
--build-arg RUNTIME_IMAGE=runtime-base \
--cache-from type=registry,ref="$TARGET_REGISTRY/$IMAGE_NAME:buildcache" \
--cache-to type=registry,ref="$TARGET_REGISTRY/$IMAGE_NAME:buildcache",mode=max \
${CACHE_FROM_ARGS} \
--cache-to type=registry,ref="${TARGET_REGISTRY}/${IMAGE_NAME}:buildcache",mode=max \
--cache-to type=registry,ref="${TARGET_REGISTRY}/${IMAGE_NAME}:${BRANCH_NAME}-cache",mode=max \
--tag "$TARGET_REGISTRY/$IMAGE_NAME:$DATE_TAG" \
--push \
.
@@ -201,6 +216,7 @@ jobs:
IMAGE_NAME="${{ env.IMAGE_NAME }}"
DATE_TAG="warm-$(date -u +%Y%m%d%H%M)"
BRANCH_NAME="${{ env.BRANCH_NAME || 'main' }}"

DEFAULT_RUNTIME="$CACHE_TARGET/${{ env.RUNTIME_IMAGE_NAME }}:latest"
RUNTIME_ARG="runtime-base"
@@ -208,12 +224,25 @@ jobs:
RUNTIME_ARG="$DEFAULT_RUNTIME"
fi

# Build cache sources - multiple sources for better cache hit rate
CACHE_SOURCES=(
"type=registry,ref=${CACHE_TARGET}/${IMAGE_NAME}:buildcache"
"type=registry,ref=${REGISTRY_TO_USE}/${IMAGE_NAME}:${BRANCH_NAME}-cache"
"type=registry,ref=${REGISTRY_TO_USE}/${IMAGE_NAME}:latest"
)

CACHE_FROM_ARGS=""
for CACHE_SRC in "${CACHE_SOURCES[@]}"; do
CACHE_FROM_ARGS="${CACHE_FROM_ARGS} --cache-from ${CACHE_SRC}"
done

docker buildx build \
--platform linux/amd64 \
--file ./Dockerfile.production \
--build-arg RUNTIME_IMAGE="$RUNTIME_ARG" \
--cache-from type=registry,ref="$CACHE_TARGET/$IMAGE_NAME:buildcache" \
--cache-to type=registry,ref="$CACHE_TARGET/$IMAGE_NAME:buildcache",mode=max \
${CACHE_FROM_ARGS} \
--cache-to type=registry,ref="${CACHE_TARGET}/${IMAGE_NAME}:buildcache",mode=max \
--cache-to type=registry,ref="${REGISTRY_TO_USE}/${IMAGE_NAME}:${BRANCH_NAME}-cache",mode=max \
--tag "$REGISTRY_TO_USE/$IMAGE_NAME:$DATE_TAG" \
--push \
.
@@ -15,9 +15,6 @@ on:
- main
- staging

env:
CACHE_DIR: /tmp/composer-cache

jobs:
tests:
name: Run Tests & Quality Checks
@@ -77,23 +74,27 @@ jobs:
cd /workspace/repo

- name: Restore Composer cache
- name: Get Composer cache directory
id: composer-cache
shell: bash
run: |
if [ -d "$CACHE_DIR/vendor" ]; then
echo "📦 Restore composer dependencies"
cp -r "$CACHE_DIR/vendor" /workspace/repo/vendor || true
fi
echo "dir=$(composer global config cache-dir 2>/dev/null | cut -d' ' -f3 || echo "$HOME/.composer/cache")" >> $GITHUB_OUTPUT

- name: Cache Composer dependencies
uses: actions/cache@v4
with:
path: |
${{ steps.composer-cache.outputs.dir }}
vendor/
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: |
${{ runner.os }}-composer-

- name: Install PHP dependencies
run: |
cd /workspace/repo
composer install --no-interaction --prefer-dist --optimize-autoloader --ignore-platform-req=php

- name: Save Composer cache
run: |
mkdir -p "$CACHE_DIR"
cp -r /workspace/repo/vendor "$CACHE_DIR/vendor" || true

- name: PHPStan (baseline)
run: |
cd /workspace/repo
@@ -104,6 +105,42 @@ jobs:
cd /workspace/repo
make cs || echo "⚠️ php-cs-fixer dry run issues detected"

- name: Validate .env.base for secrets
run: |
cd /workspace/repo
if [ -f .env.base ]; then
echo "🔍 Checking .env.base for secrets..."
# Check for potential secrets (case-insensitive)
if grep -qiE "(password|secret|key|token|encryption|vault)" .env.base | grep -v "^#" | grep -v "FILE=" | grep -v "^$$" > /dev/null; then
echo "::error::.env.base contains potential secrets! Secrets should be in .env.local or Docker Secrets."
echo "⚠️ Found potential secrets in .env.base:"
grep -iE "(password|secret|key|token|encryption|vault)" .env.base | grep -v "^#" | grep -v "FILE=" | grep -v "^$$" || true
echo ""
echo "💡 Move secrets to:"
echo " - .env.local (for local development)"
echo " - Docker Secrets (for production/staging)"
exit 1
else
echo "✅ .env.base does not contain secrets"
fi
else
echo "ℹ️ .env.base not found (optional during migration)"
fi

echo ""
echo "🔍 Checking docker-compose.base.yml for hardcoded passwords..."
if grep -E "(PASSWORD|SECRET|TOKEN).*:-[^}]*[^}]}" docker-compose.base.yml 2>/dev/null | grep -v "^#" | grep -v "FILE=" > /dev/null; then
echo "::error::docker-compose.base.yml contains hardcoded password fallbacks! Passwords must be set explicitly."
echo "⚠️ Found hardcoded password fallbacks:"
grep -E "(PASSWORD|SECRET|TOKEN).*:-[^}]*[^}]}" docker-compose.base.yml | grep -v "^#" | grep -v "FILE=" || true
echo ""
echo "💡 Remove fallback values (:-...) from base file"
echo " Passwords must be set in .env.local or via Docker Secrets"
exit 1
else
echo "✅ docker-compose.base.yml does not contain hardcoded password fallbacks"
fi

- name: Tests temporarily skipped
run: |
echo "⚠️ Tests temporarily skipped due to PHP 8.5 compatibility issues"
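One caveat in the .env.base check above: grep -q suppresses all output, so the following grep -v stages receive no lines and the final command in the pipeline (whose status the if tests) always exits non-zero; as written, that condition can never become true. A variant that filters first and tests last, offered as a sketch (it assumes it runs inside the existing [ -f .env.base ] guard), not as the committed behaviour:

```bash
#!/usr/bin/env bash
# Sketch: filter out comments, blank lines, and *_FILE= indirections first,
# then test whether any candidate secret lines remain.
set -euo pipefail

if grep -iE "(password|secret|key|token|encryption|vault)" .env.base \
     | grep -v "^#" | grep -v "FILE=" | grep -v "^$" \
     | grep -q .; then
  echo "::error::.env.base contains potential secrets!"
  exit 1
else
  echo "✅ .env.base does not contain secrets"
fi
```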
@@ -11,7 +11,120 @@ on:
workflow_dispatch:

jobs:
check-changes:
name: Check for Dependency Changes
runs-on: ubuntu-latest
outputs:
dependencies_changed: ${{ steps.filter.outputs.dependencies_changed }}
steps:
- name: Download CI helpers
shell: bash
env:
CI_TOKEN: ${{ secrets.CI_TOKEN }}
run: |
set -euo pipefail
REF="${{ github.sha }}"
if [ -z "$REF" ]; then
REF="${{ github.ref_name }}"
fi
if [ -z "$REF" ]; then
REF="${{ github.head_ref }}"
fi
if [ -z "$REF" ]; then
REF="main"
fi
URL="https://git.michaelschiemer.de/${{ github.repository }}/raw/${REF}/scripts/ci/clone_repo.sh"
mkdir -p /tmp/ci-tools
if [ -n "$CI_TOKEN" ]; then
curl -sfL -u "$CI_TOKEN:x-oauth-basic" "$URL" -o /tmp/ci-tools/clone_repo.sh
else
curl -sfL "$URL" -o /tmp/ci-tools/clone_repo.sh
fi
chmod +x /tmp/ci-tools/clone_repo.sh

- name: Analyse changed files
id: filter
shell: bash
run: |
set -euo pipefail

REF_NAME="${{ github.ref_name }}"
if [ -z "$REF_NAME" ]; then
REF_NAME="${{ github.head_ref }}"
fi
if [ -z "$REF_NAME" ]; then
REF_NAME="main"
fi

REPO="${{ github.repository }}"
WORKDIR="/workspace/repo"

export CI_REPOSITORY="$REPO"
export CI_TOKEN="${{ secrets.CI_TOKEN }}"
export CI_REF_NAME="$REF_NAME"
export CI_DEFAULT_BRANCH="main"
export CI_TARGET_DIR="$WORKDIR"
export CI_FETCH_DEPTH="2"

/tmp/ci-tools/clone_repo.sh

cd "$WORKDIR"

# For scheduled or manual runs, always run the scan
if [ "${{ github.event_name }}" = "schedule" ] || [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
echo "dependencies_changed=true" >> "$GITHUB_OUTPUT"
echo "ℹ️ Scheduled/manual run - will scan dependencies"
exit 0
fi

CHANGED_FILES=""
EVENT_BEFORE="${{ github.event.before }}"

if [ "${{ github.event_name }}" = "push" ] && [ -n "$EVENT_BEFORE" ]; then
if git rev-parse "$EVENT_BEFORE" >/dev/null 2>&1; then
CHANGED_FILES="$(git diff --name-only "$EVENT_BEFORE" HEAD || true)"
else
git fetch origin "$EVENT_BEFORE" --depth 1 || true
if git rev-parse "$EVENT_BEFORE" >/dev/null 2>&1; then
CHANGED_FILES="$(git diff --name-only "$EVENT_BEFORE" HEAD || true)"
fi
fi
fi

if [ -z "$CHANGED_FILES" ]; then
if git rev-parse HEAD^ >/dev/null 2>&1; then
CHANGED_FILES="$(git diff --name-only HEAD^ HEAD || true)"
else
git fetch origin "$REF_NAME" --depth 50 || true
if git rev-parse HEAD^ >/dev/null 2>&1; then
CHANGED_FILES="$(git diff --name-only HEAD^ HEAD || true)"
fi
fi
fi

DEPENDENCIES_CHANGED=false

if [ -n "$CHANGED_FILES" ]; then
while IFS= read -r FILE; do
[ -z "$FILE" ] && continue
if echo "$FILE" | grep -Eq "^(composer\.json|composer\.lock)$"; then
DEPENDENCIES_CHANGED=true
break
fi
done <<< "$CHANGED_FILES"
fi

echo "dependencies_changed=$DEPENDENCIES_CHANGED" >> "$GITHUB_OUTPUT"

if [ "$DEPENDENCIES_CHANGED" = "true" ]; then
echo "ℹ️ Dependencies changed - security scan will run"
else
echo "ℹ️ No dependency changes detected - skipping security scan"
fi

security-audit:
needs: check-changes
if: needs.check-changes.outputs.dependencies_changed == 'true' || github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
name: Composer Security Audit
runs-on: php-ci # Uses pre-built PHP 8.5 CI image with Composer pre-installed
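The check-changes job ultimately reduces to one question: did composer.json or composer.lock change in the commit range? A standalone sketch of the same filter for local use; the script name and argument handling are assumptions, while the grep pattern mirrors the job above:

```bash
#!/usr/bin/env bash
# Sketch: decide locally whether a commit range touches Composer manifests.
set -euo pipefail

RANGE="${1:-HEAD^..HEAD}"   # e.g. ./dep-changed.sh origin/main..HEAD

CHANGED_FILES="$(git diff --name-only "$RANGE" || true)"

DEPENDENCIES_CHANGED=false
while IFS= read -r FILE; do
  [ -z "$FILE" ] && continue
  if echo "$FILE" | grep -Eq "^(composer\.json|composer\.lock)$"; then
    DEPENDENCIES_CHANGED=true
    break
  fi
done <<< "$CHANGED_FILES"

echo "dependencies_changed=$DEPENDENCIES_CHANGED"
```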
@@ -55,6 +168,22 @@ jobs:
cd /workspace/repo

- name: Get Composer cache directory
id: composer-cache
shell: bash
run: |
echo "dir=$(composer global config cache-dir 2>/dev/null | cut -d' ' -f3 || echo "$HOME/.composer/cache")" >> $GITHUB_OUTPUT

- name: Cache Composer dependencies
uses: actions/cache@v4
with:
path: |
${{ steps.composer-cache.outputs.dir }}
vendor/
key: ${{ runner.os }}-composer-security-${{ hashFiles('**/composer.lock') }}
restore-keys: |
${{ runner.os }}-composer-security-

- name: Validate composer.json and composer.lock
run: |
cd /workspace/repo
@@ -63,13 +192,6 @@ jobs:
# Try to update lock file if needed
composer update --lock --no-interaction || echo "⚠️ Could not update lock file, but continuing..."

- name: Cache Composer packages (simple)
run: |
if [ -d "/tmp/composer-cache/vendor" ]; then
echo "📦 Restoring cached dependencies..."
cp -r /tmp/composer-cache/vendor /workspace/repo/vendor || true
fi

- name: Install dependencies
run: |
cd /workspace/repo
@@ -77,11 +199,6 @@ jobs:
# TODO: Remove --ignore-platform-req=php when dependencies are updated (estimated: 1 month)
composer install --prefer-dist --no-progress --no-dev --ignore-platform-req=php

- name: Save Composer cache
run: |
mkdir -p /tmp/composer-cache
cp -r /workspace/repo/vendor /tmp/composer-cache/vendor || true

- name: Run Composer Security Audit
id: security-audit
run: |