Compare commits


No commits in common. "810d7e0cb9aa11f53042a6b522f6c3c05f561bad" and "81b2fbc91cfc7c2097518f076d09f05fe111897b" have entirely different histories.

13 changed files with 641 additions and 303 deletions

.gitignore vendored

@@ -1,6 +1,7 @@
# Project
.env
lego.env
docker-compose.yml
data/*
data/nginx/*
!data

README.md

@@ -1,13 +1,15 @@
# `sso.s1q.dev` - Base23 SSO for all services
# `sso.base23.de` - Base23 SSO for all services
[Authentik](https://goauthentik.io/)-based SSO for our services.
## Table of Contents
- [`sso.s1q.dev` - Base23 SSO for all services](#ssos1qdev---base23-sso-for-all-services)
- [`sso.base23.de` - Base23 SSO for all services](#ssobase23de---base23-sso-for-all-services)
- [Table of Contents](#table-of-contents)
- [Prerequisites](#prerequisites)
- [Server Setup](#server-setup)
- [Tailscale](#tailscale)
- [Base23 Docker registry login](#base23-docker-registry-login)
- [CrowdSec](#crowdsec)
- [Setup CrowdSec Repo](#setup-crowdsec-repo)
- [Install CrowdSec](#install-crowdsec)
@@ -23,7 +25,32 @@
## Prerequisites
- dokploy
### Server Setup
```shell
apt update \
&& apt upgrade -y \
&& for pkg in docker.io docker-doc docker-compose podman-docker containerd runc; do sudo apt remove $pkg; done \
&& apt install ca-certificates curl \
&& install -m 0755 -d /etc/apt/keyrings \
&& curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \
&& chmod a+r /etc/apt/keyrings/docker.asc \
&& echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
tee /etc/apt/sources.list.d/docker.list > /dev/null \
&& apt update \
&& apt install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin \
&& mkdir -p /var/lib/apps \
&& ln -s /var/lib/apps \
&& apt install -y git vim \
&& TEMP_DIR=$(mktemp -d) \
&& curl -fsSL https://github.com/go-acme/lego/releases/download/v4.20.2/lego_v4.20.2_linux_amd64.tar.gz -o ${TEMP_DIR}/lego_v4.20.2_linux_amd64.tar.gz \
&& tar xzvf ${TEMP_DIR}/lego_v4.20.2_linux_amd64.tar.gz --directory=${TEMP_DIR} \
&& install -m 755 -o root -g root "${TEMP_DIR}/lego" "/usr/local/bin" \
&& rm -rf ${TEMP_DIR} \
&& unset TEMP_DIR
```
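A quick check that the setup above completed (a sketch; `lego --version` is assumed to be supported by the downloaded release):
```shell
docker --version \
&& docker compose version \
&& lego --version
```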
### Tailscale
@@ -34,12 +61,18 @@ printf "Enter preauthkey for Tailscale: " \
&& curl -fsSL https://pkgs.tailscale.com/stable/debian/bookworm.tailscale-keyring.list | sudo tee /etc/apt/sources.list.d/tailscale.list \
&& apt-get update \
&& apt-get install tailscale \
&& tailscale up --login-server https://vpn.s1q.dev --authkey ${TAILSCALE_PREAUTHKEY} \
&& tailscale up --login-server https://vpn.base23.de --authkey ${TAILSCALE_PREAUTHKEY} --advertise-tags=tag:prod-servers \
&& sleep 2 \
&& tailscale status \
&& unset TAILSCALE_PREAUTHKEY
```
### Base23 Docker registry login
```shell
docker login -u gitlab+deploy-token-5 registry.git.base23.de
```
### CrowdSec
#### Setup CrowdSec Repo
@@ -83,9 +116,9 @@ systemctl restart crowdsec; systemctl status crowdsec.service
Whitelist Tailscale IPs:
```shell
cat << EOF > /etc/crowdsec/parsers/s02-enrich/01-s1q-dev-tailscale.yaml \
cat << EOF > /etc/crowdsec/parsers/s02-enrich/01-base23-tailscale.yaml \
&& systemctl restart crowdsec; journalctl -xef -u crowdsec.service
name: s1q-dev/tailscale ## Must be unique
name: base23/tailscale ## Must be unique
description: "Whitelist events from Tailscale Subnet"
whitelist:
reason: "Tailscale clients"
@@ -94,18 +127,18 @@ whitelist:
EOF
```
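To confirm CrowdSec picked up the whitelist, a sketch (assumes `cscli` is available; whether locally added parsers show up in `cscli parsers list` can depend on the CrowdSec version):
```shell
cscli parsers list | grep -i tailscale
cscli metrics
```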
Whitelist my current Public IPs:
Whitelist our current Public IPs:
```shell
mkdir -p /etc/crowdsec/postoverflows/s01-whitelist/ \
&& cat << EOF > /etc/crowdsec/postoverflows/s01-whitelist/01-s1q-dev-public-ips.yaml \
&& cat << EOF > /etc/crowdsec/postoverflows/s01-whitelist/01-base23-public-ips.yaml \
&& crowdsec -t && systemctl restart crowdsec; systemctl status crowdsec.service
name: s1q-dev/public-ips ## Must be unique
description: "Whitelist events from s1q-dev public IPs"
name: base23/public-ips ## Must be unique
description: "Whitelist events from base23 public IPs"
whitelist:
reason: "s1q-dev Public IPs"
reason: "Base23 Public IPs"
expression:
- evt.Overflow.Alert.Source.IP in LookupHost("r3w.de")
- evt.Overflow.Alert.Source.IP in LookupHost("asterix.ddns.base23.de")
EOF
```
@@ -118,8 +151,8 @@ cscli collections install firix/authentik \
---
source: docker
container_name_regexp:
- sso-s1q-dev-de-server-*
- sso-s1q-dev-de-worker-*
- sso-base23-de-server-*
- sso-base23-de-worker-*
labels:
type: authentik
EOF
@@ -144,8 +177,8 @@ Setup notifications:
```shell
cd /root/apps \
&& git clone ssh://git@git.base23.de:222/base23/sso.s1q.dev.git \
&& cd sso.s1q.dev \
&& git clone ssh://git@git.base23.de:222/base23/sso.base23.de.git \
&& cd sso.base23.de \
&& ./scripts/init.sh
```
@@ -169,47 +202,37 @@ TARGET_IPV6=$(dig +short "${TARGET_DOMAIN}" AAAA | grep -E '^(([0-9a-fA-F]{1,4}:
### First run
```shell
./scripts/compose.sh build --no-cache \
docker compose build --no-cache \
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
--build-arg SRC_REV=$(git rev-parse --short HEAD) \
&& ./scripts/compose.sh up -d; ./scripts/compose.sh logs -f
&& docker compose up -d; docker compose logs -f
```
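Once the stack is up, a quick sanity check (a sketch for the plain `docker compose` variant; it assumes `curl` exists in the nginx image, which its HEALTHCHECK already relies on):
```shell
docker compose ps \
&& docker compose exec nginx curl -fsS http://localhost:8181/health
```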
## Upgrade
### Test
This is intended for testing upgrades before they are rolled out to prod.
1. Check if the backups are up to date: `./scripts/compose.sh run --rm restore-cli /usr/local/bin/restic snapshots`
2. Create a new branch `git checkout -b <version>`.
3. Download the Docker Compose file for the version you want to upgrade to:
`curl -fsSL -o docker-compose.yml https://goauthentik.io/version/<version>/docker-compose.yml`
4. Update `AUTHENTIK_TAG` to the desired tag in `env.test.template`.
5. Check the upstream `docker-compose.yml` against ours for configuration changes (see the diff sketch after this list). Check the PostgreSQL and Redis Docker tags: minor PostgreSQL revisions *should be fine*; check the changelogs for any issues and, if none are present, raise to the latest minor version (e.g. 16.6 -> 16.9). Redis upgrades are usually less problematic, but check nonetheless.
6. Run `diff --color='auto' env.test.template .env` to display the diff between
1. Check if the backups are up to date: `docker compose run --rm restore-cli /usr/local/bin/restic snapshots`
2. Update `AUTHENTIK_TAG` to the desired tag in `env.test.template`.
3. Commit & push changes to the Repo.
4. Run `diff --color='auto' env.test.template .env` to display the diff between
`env.test.template` and `.env`.
7. Port the changes made to `.env`.
8. `./scripts/compose.sh pull`
9. `./scripts/compose.sh down`
10. `./scripts/compose.sh up -d; ./scripts/compose.sh logs -f`
9. Check the logs for any issues during startup. Check that https://sso.test.base23.de is reachable and test the SSO login (e.g. https://whoami.test.base23.de).
10. Apply the changes made for test to the prod files (`docker-compose.<stage>.yml`, `env.<stage>.template`), commit & push the changes to the repo in a new branch, and create a merge request in preparation for the prod upgrade.
5. Port the changes to `.env`.
6. `docker-compose-2.32.4 pull`
7. `docker-compose-2.32.4 down`
8. `docker-compose-2.32.4 up -d; docker-compose-2.32.4 logs -f`
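For the upstream comparison step above, a minimal sketch (fill in the `<version>` placeholder; the temporary path is arbitrary):
```shell
curl -fsSL -o /tmp/docker-compose.upstream.yml https://goauthentik.io/version/<version>/docker-compose.yml \
&& diff --color='auto' /tmp/docker-compose.upstream.yml docker-compose.yml
```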
### Prod
It is expected that the upgrade has already been performed and tested on https://sso.test.base23.de, and that the changes have been merged into main.
1. Check if the backups are up to date: `./scripts/compose.sh run --rm restore-cli /usr/local/bin/restic snapshots`
2. Create a new branch `git checkout -b <version>`.
3. Download the Docker Compose file for the version you want to upgrade to:
`curl -fsSL -o docker-compose.yml https://goauthentik.io/version/<version>/docker-compose.yml`
4. Update `AUTHENTIK_TAG` to the desired tag in `env.prod.template`.
5. Commit & push changes to the Repo.
6. Run `diff --color='auto' env.prod.template .env` to display the diff between
1. Check if the backups are up to date: `docker compose run --rm restore-cli /usr/local/bin/restic snapshots`
2. Update `AUTHENTIK_TAG` to the desired tag in `env.prod.template`.
3. Commit & push changes to the Repo.
4. Run `diff --color='auto' env.prod.template .env` to display the diff between
`env.prod.template` and `.env`.
7. Port the changes to `.env`.
8. `./scripts/compose.sh pull`
9. `./scripts/compose.sh down`
10. `./scripts/compose.sh up -d; ./scripts/compose.sh logs -f`
5. Port the changes to `.env`.
6. `docker compose pull`
7. `docker compose down`
8. `docker compose up -d; docker compose logs -f`
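After the prod upgrade, a short verification sketch (authentik's `/-/health/...` endpoints are assumed to be reachable through the public domain):
```shell
docker compose ps \
&& curl -fsS https://sso.base23.de/-/health/live/ \
&& curl -fsS https://sso.base23.de/-/health/ready/
```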
## Disaster recovery / restore


@@ -0,0 +1,90 @@
# Upstream where your authentik server is hosted.
upstream authentik {
server server:9443;
# Improve performance by keeping some connections alive.
keepalive 10;
}
# Upgrade WebSocket if requested, otherwise use keepalive
map $http_upgrade $connection_upgrade_keepalive {
default upgrade;
'' '';
}
# Server just for serving a health endpoint
server {
listen 127.0.0.1:8181;
server_name localhost;
# replace with the IP address of your resolver
resolver ${NGINX_RESOLVER};
# Handle /health separately without serving any files
location = /health {
access_log off;
default_type text/plain;
return 200 'OK';
}
}
# Redirect to HTTPS
server {
listen ${NGINX_HTTP_PORT};
listen [::]:${NGINX_HTTP_PORT};
server_name ${NGINX_SERVERNAME};
# Exclude Let's Encrypt directory from redirection
location /.well-known/acme-challenge/ {
root /var/www/letsencrypt;
}
# Redirect all other traffic to HTTPS
location / {
return 302 https://$host$request_uri;
}
}
# HTTPS Server
server {
listen ${NGINX_HTTPS_PORT} ssl;
listen [::]:${NGINX_HTTPS_PORT} ssl;
http2 on;
server_name ${NGINX_SERVERNAME};
ssl_certificate /etc/nginx/ssl/certs/_.base23.de.crt;
ssl_certificate_key /etc/nginx/ssl/certs/_.base23.de.key;
ssl_session_timeout ${NGINX_SSL_SESSION_TIMEOUT};
ssl_session_cache ${NGINX_SSL_SESSION_CACHE};
ssl_dhparam /etc/nginx/ssl/dhparams.pem;
# intermediate configuration
ssl_protocols ${NGINX_SSL_PROTOCOLS};
ssl_ciphers ${NGINX_SSL_CIPHERS};
ssl_prefer_server_ciphers ${NGINX_SSL_PREFER_SERVER_CIPHERS};
# HSTS (ngx_http_headers_module is required) (63072000 seconds)
add_header Strict-Transport-Security ${NGINX_HEADER_STRICT_TRANSPORT_SECURITY};
# OCSP stapling
ssl_stapling ${NGINX_SSL_STAPLING};
ssl_stapling_verify ${NGINX_SSL_STAPLING_VERIFY};
# verify chain of trust of OCSP response using Root CA and Intermediate certs
ssl_trusted_certificate /etc/nginx/ssl/certs/_.base23.de.issuer.crt;
# replace with the IP address of your resolver
resolver ${NGINX_RESOLVER};
client_max_body_size 50m;
location / {
proxy_pass https://authentik;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade_keepalive;
}
}
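Since the server blocks above use `${NGINX_*}` placeholders, this file is rendered by the nginx image's envsubst templating at container start; a quick way to validate the rendered result is to test it inside the running container (a sketch, assuming the service is named `nginx` as in the compose files below):
```shell
docker compose exec nginx nginx -t
```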


@@ -1,121 +0,0 @@
---
services:
geoipupdate:
image: "maxmindinc/geoipupdate:latest"
volumes:
- "geoip:/usr/share/GeoIP"
environment:
GEOIPUPDATE_EDITION_IDS: "GeoLite2-City GeoLite2-ASN"
GEOIPUPDATE_FREQUENCY: "8"
GEOIPUPDATE_ACCOUNT_ID: "${GEOIPUPDATE_ACCOUNT_ID:?MaxMind GeoIP account ID required}"
GEOIPUPDATE_LICENSE_KEY: "${GEOIPUPDATE_LICENSE_KEY:?MaxMind GeoIP license key required}"
postgresql:
image: docker.io/library/postgres:${POSTGRES_TAG:?POSTGRES_TAG is not configured}
volumes:
- database:/var/lib/postgresql/data
- backups_db:/var/lib/postgresql/backups
networks:
- backend
redis:
image: docker.io/library/redis:${REDIS_TAG:?REDIS_TAG is not configured}
networks:
- backend
server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:?AUTHENTIK_TAG is not configured}
environment:
B23_ALLOW_UP: ${B23_ALLOW_UP:?Use the script ./scripts/compose.sh to run this compose!}
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS:?PG_PASS is required. - Password for authentik's postgresql database}
ports: []
networks:
- backend
worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:?AUTHENTIK_TAG is not configured}
environment:
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS:?PG_PASS is required. - Password for authentik's postgresql database}
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ./data/authentik/media:/media
- ./data/authentik/certs:/certs
- ./data/authentik/custom-templates:/templates
- geoip:/geoip
networks:
- backend
backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
PRE_COMMANDS: |-
cd /compose-project/
[[ $($(docker compose &>/dev/null); echo "$?") -eq 0 ]] || apk add --no-cache docker-cli-compose
docker compose exec -T postgresql pg_dump -U ${PG_USER:-authentik} -d ${PG_DB:-authentik} -f /var/lib/postgresql/backups/${PG_DB:-authentik}.sql
RUN_ON_STARTUP: "false"
BACKUP_CRON: "00 32 2 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
RESTIC_BACKUP_SOURCES: /var/lib/postgresql/backups /var/lib/authentik/backups /var/lib/lego/backups
RESTIC_BACKUP_ARGS: >-
--tag ${RESTIC_TAG:?Restic tag is required}
--verbose
RESTIC_FORGET_ARGS: >-
--keep-last 10
--keep-daily 7
--keep-weekly 5
--keep-monthly 12
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
- /var/run/docker.sock:/var/run/docker.sock
- ./docker-compose.yml:/compose-project/docker-compose.yml:ro
- ./.env:/compose-project/.env:ro
- backups_db:/var/lib/postgresql/backups:ro
- ./data/authentik/certs:/var/lib/authentik/backups/certs:ro
- ./data/authentik/custom-templates:/var/lib/authentik/backups/templates:ro
- ./data/authentik/media:/var/lib/authentik/backups/media:ro
- ./data/.lego:/var/lib/lego/backups:ro
prune-backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
SKIP_INIT: "true"
RUN_ON_STARTUP: "false"
PRUNE_CRON: "00 47 3 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
# Checks whether the backups are still intact; could be extended to verify the data as well
check-backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
#RUN_ON_STARTUP: "true"
SKIP_INIT: "true"
CHECK_CRON: "00 08 04 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: ${RESTIC_REPO_PASSWORD}
#POST_COMMANDS_FAILURE: echo "Post Command Failure test" > /restic/message
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
restore-cli:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
entrypoint: /entrypoint-restore
volumes:
backups_db:
driver: local
geoip:
driver: local
networks:
backend:


@@ -1,13 +1,112 @@
---
# Based on authentik's official docker-compose.yml
# Version: 2025.2.1
services:
server:
geoipupdate:
image: "maxmindinc/geoipupdate:latest"
volumes:
- "geoip:/usr/share/GeoIP"
environment:
B23_ALLOW_UP: "true"
GEOIPUPDATE_EDITION_IDS: "GeoLite2-City GeoLite2-ASN"
GEOIPUPDATE_FREQUENCY: "8"
GEOIPUPDATE_ACCOUNT_ID: "${GEOIPUPDATE_ACCOUNT_ID:?MaxMind GeoIP account ID required}"
GEOIPUPDATE_LICENSE_KEY: "${GEOIPUPDATE_LICENSE_KEY:?MaxMind GeoIP license key required}"
postgresql:
image: docker.io/library/postgres:${POSTGRES_TAG:?POSTGRES_TAG is not configured}
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
start_period: 20s
interval: 30s
retries: 5
timeout: 5s
volumes:
- database:/var/lib/postgresql/data
- backups_db:/var/lib/postgresql/backups
environment:
POSTGRES_PASSWORD: ${PG_PASS:?database password required}
POSTGRES_USER: ${PG_USER:-authentik}
POSTGRES_DB: ${PG_DB:-authentik}
env_file:
- .env
networks:
- backend
redis:
image: docker.io/library/redis:${REDIS_TAG:?REDIS_TAG is not configured}
command: --save 60 1 --loglevel warning
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "redis-cli ping | grep PONG"]
start_period: 20s
interval: 30s
retries: 5
timeout: 3s
volumes:
- redis:/data
networks:
- backend
server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:?AUTHENTIK_TAG is not configured}
restart: unless-stopped
command: server
environment:
AUTHENTIK_REDIS__HOST: redis
AUTHENTIK_POSTGRESQL__HOST: postgresql
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik}
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS:?PG_PASS is required. - Password for authentik's postgresql database}
volumes:
- ./data/authentik/media:/media
- ./data/authentik/custom-templates:/templates
- geoip:/geoip
env_file:
- .env
depends_on:
postgresql:
condition: service_healthy
redis:
condition: service_healthy
networks:
- backend
- frontend
worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:?AUTHENTIK_TAG is not configured}
restart: unless-stopped
command: worker
environment:
AUTHENTIK_REDIS__HOST: redis
AUTHENTIK_POSTGRESQL__HOST: postgresql
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik}
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS:?PG_PASS is required. - Password for authentik's postgresql database}
# `user: root` and the docker socket volume are optional.
# See more for the docker socket integration here:
# https://goauthentik.io/docs/outposts/integrations/docker
# Removing `user: root` also prevents the worker from fixing the permissions
# on the mounted folders, so when removing this make sure the folders have the correct UID/GID
# (1000:1000 by default)
user: root
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ./data/authentik/media:/media
- ./data/authentik/certs:/certs
- ./data/authentik/custom-templates:/templates
- geoip:/geoip
env_file:
- .env
depends_on:
postgresql:
condition: service_healthy
redis:
condition: service_healthy
networks:
- backend
nginx:
build:
context: ./docker/nginx
@@ -51,5 +150,96 @@ services:
networks:
- frontend
backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
PRE_COMMANDS: |-
cd /compose-project/
[[ $($(docker compose &>/dev/null); echo "$?") -eq 0 ]] || apk add --no-cache docker-cli-compose
docker compose exec -T postgresql pg_dump -U ${PG_USER:-authentik} -d ${PG_DB:-authentik} -f /var/lib/postgresql/backups/${PG_DB:-authentik}.sql
RUN_ON_STARTUP: "false"
BACKUP_CRON: "00 32 2 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
RESTIC_BACKUP_SOURCES: /var/lib/postgresql/backups /var/lib/authentik/backups /var/lib/lego/backups
RESTIC_BACKUP_ARGS: >-
--tag ${RESTIC_TAG:?Restic tag is required}
--verbose
RESTIC_FORGET_ARGS: >-
--keep-last 10
--keep-daily 7
--keep-weekly 5
--keep-monthly 12
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
- /var/run/docker.sock:/var/run/docker.sock
- ./docker-compose.yml:/compose-project/docker-compose.yml:ro
- ./.env:/compose-project/.env:ro
- backups_db:/var/lib/postgresql/backups:ro
- ./data/authentik/certs:/var/lib/authentik/backups/certs:ro
- ./data/authentik/custom-templates:/var/lib/authentik/backups/templates:ro
- ./data/authentik/media:/var/lib/authentik/backups/media:ro
- ./data/.lego:/var/lib/lego/backups:ro
prune-backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
SKIP_INIT: "true"
RUN_ON_STARTUP: "false"
PRUNE_CRON: "00 47 3 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
# Checks whether the backups are still intact; could be extended to verify the data as well
check-backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
#RUN_ON_STARTUP: "true"
SKIP_INIT: "true"
CHECK_CRON: "00 08 04 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: ${RESTIC_REPO_PASSWORD}
#POST_COMMANDS_FAILURE: echo "Post Command Failure test" > /restic/message
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
restore-cli:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
entrypoint: /entrypoint-restore
environment:
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
- ./docker-compose.yml:/compose-project/docker-compose.yml:ro
- ./.env:/compose-project/.env:ro
- backups_db:/var/lib/postgresql/backups:rw
- ./data/authentik/certs:/var/lib/authentik/backups/certs:rw
- ./data/authentik/custom-templates:/var/lib/authentik/backups/templates:rw
- ./data/authentik/media:/var/lib/authentik/backups/media:rw
- ./data/.lego:/var/lib/lego/backups:rw
volumes:
backups_db:
driver: local
database:
driver: local
redis:
driver: local
geoip:
driver: local
networks:
backend:
frontend:


@@ -1,8 +1,75 @@
---
# Based on authentik's official docker-compose.yml
# Version: 2025.2.1
services:
server:
geoipupdate:
image: "maxmindinc/geoipupdate:latest"
volumes:
- "geoip:/usr/share/GeoIP"
environment:
B23_ALLOW_UP: "true"
GEOIPUPDATE_EDITION_IDS: "GeoLite2-City GeoLite2-ASN"
GEOIPUPDATE_FREQUENCY: "8"
GEOIPUPDATE_ACCOUNT_ID: "${GEOIPUPDATE_ACCOUNT_ID:?MaxMind GeoIP account ID required}"
GEOIPUPDATE_LICENSE_KEY: "${GEOIPUPDATE_LICENSE_KEY:?MaxMind GeoIP license key required}"
postgresql:
image: docker.io/library/postgres:${POSTGRES_TAG:?POSTGRES_TAG is not configured}
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
start_period: 20s
interval: 30s
retries: 5
timeout: 5s
volumes:
- database:/var/lib/postgresql/data
- backups_db:/var/lib/postgresql/backups
environment:
POSTGRES_PASSWORD: ${PG_PASS:?database password required}
POSTGRES_USER: ${PG_USER:-authentik}
POSTGRES_DB: ${PG_DB:-authentik}
env_file:
- .env
networks:
- backend
redis:
image: docker.io/library/redis:${REDIS_TAG:?REDIS_TAG is not configured}
command: --save 60 1 --loglevel warning
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "redis-cli ping | grep PONG"]
start_period: 20s
interval: 30s
retries: 5
timeout: 3s
volumes:
- redis:/data
networks:
- backend
server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:?AUTHENTIK_TAG is not configured}
restart: unless-stopped
command: server
environment:
AUTHENTIK_REDIS__HOST: redis
AUTHENTIK_POSTGRESQL__HOST: postgresql
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik}
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS:?PG_PASS is required. - Password for authentik's postgresql database}
volumes:
- ./data/authentik/media:/media
- ./data/authentik/custom-templates:/templates
- geoip:/geoip
env_file:
- .env
depends_on:
postgresql:
condition: service_healthy
redis:
condition: service_healthy
networks:
- backend
- web
@@ -17,6 +84,115 @@ services:
- "traefik.http.services.sso.loadbalancer.server.port=9443" # set the port the container listens on
- "traefik.http.services.sso.loadbalancer.server.scheme=https"
worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:?AUTHENTIK_TAG is not configured}
restart: unless-stopped
command: worker
environment:
AUTHENTIK_REDIS__HOST: redis
AUTHENTIK_POSTGRESQL__HOST: postgresql
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik}
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS:?PG_PASS is required. - Password for authentik's postgresql database}
# `user: root` and the docker socket volume are optional.
# See more for the docker socket integration here:
# https://goauthentik.io/docs/outposts/integrations/docker
# Removing `user: root` also prevents the worker from fixing the permissions
# on the mounted folders, so when removing this make sure the folders have the correct UID/GID
# (1000:1000 by default)
user: root
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ./data/authentik/media:/media
- ./data/authentik/certs:/certs
- ./data/authentik/custom-templates:/templates
- geoip:/geoip
env_file:
- .env
depends_on:
postgresql:
condition: service_healthy
redis:
condition: service_healthy
networks:
- backend
backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
PRE_COMMANDS: |-
cd /compose-project/
[[ $($(docker compose &>/dev/null); echo "$?") -eq 0 ]] || apk add --no-cache docker-cli-compose
docker compose exec -T postgresql pg_dump -U ${PG_USER:-authentik} -d ${PG_DB:-authentik} -f /var/lib/postgresql/backups/${PG_DB:-authentik}.sql
RUN_ON_STARTUP: "false"
BACKUP_CRON: "00 32 2 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
RESTIC_BACKUP_SOURCES: /var/lib/postgresql/backups /var/lib/authentik/backups /var/lib/lego/backups
RESTIC_BACKUP_ARGS: >-
--tag ${RESTIC_TAG:?Restic tag is required}
--verbose
RESTIC_FORGET_ARGS: >-
--keep-last 10
--keep-daily 7
--keep-weekly 5
--keep-monthly 12
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
- /var/run/docker.sock:/var/run/docker.sock
- ./docker-compose.yml:/compose-project/docker-compose.yml:ro
- ./.env:/compose-project/.env:ro
- backups_db:/var/lib/postgresql/backups:ro
- ./data/authentik/certs:/var/lib/authentik/backups/certs:ro
- ./data/authentik/custom-templates:/var/lib/authentik/backups/templates:ro
- ./data/authentik/media:/var/lib/authentik/backups/media:ro
- ./data/.lego:/var/lib/lego/backups:ro
prune-backup:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
restart: unless-stopped
environment:
SKIP_INIT: "true"
RUN_ON_STARTUP: "false"
PRUNE_CRON: "00 47 3 * * *"
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
restore-cli:
image: registry.git.base23.de/base23/backup/resticker:${RESTICKER_TAG:?RESTICKER_TAG is not configured}
entrypoint: /entrypoint-restore
environment:
RESTIC_REPOSITORY: sftp://${RESTIC_REPO_USER:?Restic repository user is required}@${RESTIC_REPO_ADDRESS:?Restic repository address is required}:${RESTIC_REPO_PORT:?Restic repository port is required}//backup
RESTIC_PASSWORD: '${RESTIC_REPO_PASSWORD:?Restic repository password is required}'
TZ: Europe/Berlin
volumes:
- ./data/restic/ssh/:/run/secrets/.ssh:ro
- ./docker-compose.yml:/compose-project/docker-compose.yml:ro
- ./.env:/compose-project/.env:ro
- backups_db:/var/lib/postgresql/backups:rw
- ./data/authentik/certs:/var/lib/authentik/backups/certs:rw
- ./data/authentik/custom-templates:/var/lib/authentik/backups/templates:rw
- ./data/authentik/media:/var/lib/authentik/backups/media:rw
- ./data/.lego:/var/lib/lego/backups:rw
volumes:
backups_db:
driver: local
database:
driver: local
redis:
driver: local
geoip:
driver: local
networks:
backend:
web:
external: true


@@ -1,92 +0,0 @@
---
services:
postgresql:
image: docker.io/library/postgres:16-alpine
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
start_period: 20s
interval: 30s
retries: 5
timeout: 5s
volumes:
- database:/var/lib/postgresql/data
environment:
POSTGRES_PASSWORD: ${PG_PASS:?database password required}
POSTGRES_USER: ${PG_USER:-authentik}
POSTGRES_DB: ${PG_DB:-authentik}
env_file:
- .env
redis:
image: docker.io/library/redis:alpine
command: --save 60 1 --loglevel warning
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "redis-cli ping | grep PONG"]
start_period: 20s
interval: 30s
retries: 5
timeout: 3s
volumes:
- redis:/data
server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.4.2}
restart: unless-stopped
command: server
environment:
AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY:?secret key required}
AUTHENTIK_REDIS__HOST: redis
AUTHENTIK_POSTGRESQL__HOST: postgresql
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik}
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
volumes:
- ./media:/media
- ./custom-templates:/templates
env_file:
- .env
ports:
- "${COMPOSE_PORT_HTTP:-9000}:9000"
- "${COMPOSE_PORT_HTTPS:-9443}:9443"
depends_on:
postgresql:
condition: service_healthy
redis:
condition: service_healthy
worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.4.2}
restart: unless-stopped
command: worker
environment:
AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY:?secret key required}
AUTHENTIK_REDIS__HOST: redis
AUTHENTIK_POSTGRESQL__HOST: postgresql
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik}
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
# `user: root` and the docker socket volume are optional.
# See more for the docker socket integration here:
# https://goauthentik.io/docs/outposts/integrations/docker
# Removing `user: root` also prevents the worker from fixing the permissions
# on the mounted folders, so when removing this make sure the folders have the correct UID/GID
# (1000:1000 by default)
user: root
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ./media:/media
- ./certs:/certs
- ./custom-templates:/templates
env_file:
- .env
depends_on:
postgresql:
condition: service_healthy
redis:
condition: service_healthy
volumes:
database:
driver: local
redis:
driver: local

docker/nginx/Dockerfile Normal file

@@ -0,0 +1,83 @@
ARG IMAGE=nginxinc/nginx-unprivileged:stable-bullseye
FROM $IMAGE AS builder
ARG HTTP_SUBSTITUTIONS_VERSION='e12e965ac1837ca709709f9a26f572a54d83430e'
ARG HEADERS_MORE_VERSION='0.37'
USER root
RUN set -x \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
build-essential \
libgd-dev \
libgeoip-dev \
libedit-dev \
libxslt1-dev \
libssl-dev \
libpcre2-dev \
libperl-dev \
zlib1g-dev \
unzip \
&& rm -rf /var/lib/apt/lists/* \
&& mkdir -p /usr/src \
&& curl \
-o nginx.tar.gz \
https://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz \
&& curl -L \
-o nginx_substitutions_filter.zip \
https://codeload.github.com/yaoweibin/ngx_http_substitutions_filter_module/zip/${HTTP_SUBSTITUTIONS_VERSION} \
&& curl -L \
-o headers-more-nginx-module.zip \
https://codeload.github.com/openresty/headers-more-nginx-module/zip/refs/tags/v${HEADERS_MORE_VERSION} \
&& tar -zxC /usr/src -f nginx.tar.gz \
&& unzip nginx_substitutions_filter.zip -d /usr/src \
&& unzip headers-more-nginx-module.zip -d /usr/src \
&& rm nginx.tar.gz \
&& rm nginx_substitutions_filter.zip \
&& rm headers-more-nginx-module.zip \
&& cd /usr/src/nginx-${NGINX_VERSION} \
&& set -eux \
&& eval ./configure $(nginx -V 2>&1 | sed -n -e "s/^.*configure arguments: //p") \
--add-dynamic-module=/usr/src/ngx_http_substitutions_filter_module-${HTTP_SUBSTITUTIONS_VERSION} \
--add-dynamic-module=/usr/src/headers-more-nginx-module-${HEADERS_MORE_VERSION} \
--with-http_sub_module \
&& set +eux \
&& make \
&& make install
FROM $IMAGE
ARG BUILD_DATE=01.01.1970
ARG IMG_TITLE=docker-image
ARG IMAGE_VERSION=0
ARG SRC_REV=0
COPY --from=builder --chown=root:root --chmod=0644 /usr/lib/nginx/modules/ngx_http_subs_filter_module.so /usr/lib/nginx/modules/ngx_http_subs_filter_module.so
COPY --from=builder --chown=root:root --chmod=0644 /usr/lib/nginx/modules/ngx_http_headers_more_filter_module.so /usr/lib/nginx/modules/ngx_http_headers_more_filter_module.so
COPY --from=builder --chown=root:root --chmod=0755 /usr/sbin/nginx /usr/sbin/nginx
RUN sed -i '1iload_module /usr/lib/nginx/modules/ngx_http_subs_filter_module.so;\n' /etc/nginx/nginx.conf \
&& sed -i '1iload_module /usr/lib/nginx/modules/ngx_http_headers_more_filter_module.so;\n' /etc/nginx/nginx.conf
# Redirect log output to stdout and stderr
RUN set -x \
&& sed -i 's,/var/log/nginx/error.log,/dev/stderr,' /etc/nginx/nginx.conf \
&& sed -i 's,/var/log/nginx/access.log,/dev/stdout,' /etc/nginx/nginx.conf
# Healthcheck to ping the /health endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 CMD curl --fail http://localhost:8181/health || exit 1
LABEL \
maintainer="philip.henning@base23.de" \
org.opencontainers.image.created="${BUILD_DATE}" \
org.opencontainers.image.vendor="Base23 GmbH" \
org.opencontainers.image.authors="Philip Henning | Base23 GmbH <philip.henning@base23.de>" \
org.opencontainers.image.title="${IMG_TITLE}" \
org.opencontainers.image.description="rootless nginx image based on nginxinc/nginx-unprivileged with subst module" \
org.opencontainers.image.version="${IMAGE_VERSION}" \
org.opencontainers.image.source="https://git.base23.de/base23/sso.base23.de" \
org.opencontainers.image.revision="${SRC_REV}"
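To build just this image outside of the compose workflow, a sketch (the tag is a placeholder; the build args mirror the ones passed in the README's first-run command):
```shell
docker build \
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
--build-arg SRC_REV=$(git rev-parse --short HEAD) \
-t nginx-unprivileged-subs:local \
docker/nginx
```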


@@ -4,9 +4,9 @@ PUBLIC_DOMAIN=sso.base23.de
COMPOSE_PROJECT_NAME=sso-base23-de
# Server Versions
AUTHENTIK_TAG=2025.6.4
POSTGRES_TAG=16.9-alpine
REDIS_TAG=8.0-alpine
AUTHENTIK_TAG=2025.2.4
POSTGRES_TAG=16.6-alpine
REDIS_TAG=7.4.2-alpine
RESTICKER_TAG=0.0.2-restic0.17.0
NGINX_UNPRIVILEGED_TAG=1.27.2-bookworm


@@ -4,9 +4,9 @@ PUBLIC_DOMAIN=sso.test.base23.de
COMPOSE_PROJECT_NAME=sso-base23-de
# Server Versions
AUTHENTIK_TAG=2025.6.4
POSTGRES_TAG=16.9-alpine
REDIS_TAG=8.0-alpine
AUTHENTIK_TAG=2025.2.4
POSTGRES_TAG=16.6-alpine
REDIS_TAG=7.4.2-alpine
RESTICKER_TAG=0.0.2-restic0.17.0
# Error reporting & Logging

scripts/cert_renew.sh Executable file

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
set -euf -o pipefail
cd "$(dirname "$(realpath "$0")")/../"
lego \
--path ./data/.lego \
--email="acme@base23.de" \
--domains="*.base23.de" \
--dns hetzner \
renew \
--renew-hook="./scripts/cert_renew_hook.sh"
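Renewal is typically scheduled; a cron sketch (the checkout path is an assumption based on the clone location used earlier, and the DNS provider credentials, e.g. from the git-ignored `lego.env`, must be made available to the cron environment):
```shell
# crontab entry: attempt renewal every Monday at 03:17
17 3 * * 1 /root/apps/sso.base23.de/scripts/cert_renew.sh >> /var/log/cert_renew.log 2>&1
```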

scripts/cert_renew_hook.sh Executable file

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -euf -o pipefail
cd "$(dirname "$(realpath "$0")")/../"
install -m 400 -o 101 -g 101 "./data/.lego/certificates"/{_.base23.de.crt,_.base23.de.issuer.crt,_.base23.de.key} "./data/nginx/certs"
docker compose restart nginx


@@ -1,32 +0,0 @@
#!/usr/bin/env bash
set -euf -o pipefail
# Check if yq is installed
if ! command -v yq &>/dev/null; then
echo "yq is required but not installed. Please install it manually."
exit 1
fi
STAGE="${1:-}"
shift || true
if [[ -z "${STAGE,,}" ]]; then
echo "Usage: $0 <stage>"
echo "Example: $0 test"
exit 1
fi
cd "$(dirname "$(realpath "$0")")/../"
AUTHENTIK_DOCKER_COMPOSE_PATH="$(realpath "$(pwd)")"
# Merge docker-compose files using yq
# 1st merger is docker-compose.override.yml on top of the base docker-compose.yml
# 2nd merger is the stage-specific docker-compose file on top of the result of the first merger
# The final result is piped to docker compose command
yq eval-all 'select(fileIndex == 0) * select(fileIndex == 1)' \
docker-compose.yml \
docker-compose.override.yml \
| yq eval-all 'select(fileIndex == 0) * select(fileIndex == 1)' \
- \
docker-compose.${STAGE,,}.yml \
| ${DOCKER_COMPOSE_CLI} -f- ${@:-}