---
- hosts: pottwal.n39.eu
  become: true

  vars:
    ansible_python_interpreter: /usr/bin/python3
    data_dir: "/srv/data"

    gitea_host_port: 9091

    shlink_host_port: 8083
    shlink_domain_name: sl.n39.eu

    prosody_data_dir: "{{ data_dir }}/prosody"
    prosody_domain_name: jabber.n39.eu
    jabber_host_port: 8086

    hedgedoc_host_port: 8084
    hedgedoc_image: quay.io/hedgedoc/hedgedoc:1.9.3

    redmine_host_port: 8087
    redmine_image: redmine:4.2.7
    redmine_mysql_image: mysql:5.7
    redmine_database: redmine

    influxdb_host_port: 8088
    influxdb_image: influxdb:2.4-alpine

    uptimekuma_host_port: 8085

  roles:
    - role: docker_setup
      vars:
        docker_data_root: "/srv/docker"
    - role: apache
    - role: apache_letsencrypt  # Uses configuration from dehydrated setup
    - role: ansible-role-dehydrated
      vars:
        dehydrated_contact_email: "{{ server_admin }}"
        dehydrated_domains:
          - name: gitea.n39.eu
          - name: uritools.n39.eu
          - name: uritools-api.n39.eu
          - name: sl.n39.eu
          - name: pad.n39.eu
          - name: "{{ prosody_domain_name }}"
            alternate_names:
              - conference.jabber.n39.eu
            # Import the renewed certificate into the running prosody container
            deploy_cert_hook: "docker exec prosody prosodyctl --root cert import ${DOMAIN} /var/lib/dehydrated/certs"
          - name: redmine.n39.eu
          - name: influx.n39.eu
          - name: uptime.n39.eu
    - role: penguineer.dehydrated_cron
    - role: dd24_dyndns_cron  # variables are set in the inventory
    - role: cleanuri
      vars:
        cleanuri_ui_domain: uritools.n39.eu
        cleanuri_ui_host_port: 8090
        cleanuri_api_domain: uritools-api.n39.eu
        cleanuri_api_host_port: 8091
        # RabbitMQ setup can be found in the inventory

  tasks:
    - name: Check if gitea data dir exists
      ansible.builtin.stat:
        path: "{{ data_dir }}/gitea"
      register: gitea_dir

    - name: Fail if gitea data dir does not exist
      ansible.builtin.fail:
        msg: "Gitea data dir is missing, please restore from the backup!"
      when: not gitea_dir.stat.exists

    # If port 2222 is changed here, it must also be adapted
    # in the gitea config file (see application volume)!!
    - name: Setup the docker container for gitea
      docker_container:
        name: gitea
        image: "gitea/gitea:1.16.8"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        ports:
          - "127.0.0.1:{{ gitea_host_port }}:3000"
          - "2222:2222"
        env:
          APP_NAME: "Netz39 Gitea"
          RUN_MODE: "prod"
          SSH_DOMAIN: "gitea.n39.eu"
          SSH_PORT: "2222"
          SSH_START_SERVER: "false"
          ROOT_URL: "https://gitea.n39.eu"
          DISABLE_REGISTRATION: "true"
          USER_UID: "1000"
          USER_GID: "1000"
        volumes:
          - "{{ data_dir }}/gitea:/data:rw"

    - name: Setup proxy site gitea.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "gitea.n39.eu"
        proxy_port: "{{ gitea_host_port }}"

    - name: Ensure apt-cacher container is running
      docker_container:
        name: apt_cacher_ng
        image: "mrtux/apt-cacher-ng"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        ports:
          - "3142:3142"

    - name: Ensure container for shlink is running
      docker_container:
        name: shlink
        image: shlinkio/shlink:2.6.2
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ shlink_host_port }}:8080"
        restart_policy: unless-stopped
        env:
          SHORT_DOMAIN_HOST: "{{ shlink_domain_name }}"
          SHORT_DOMAIN_SCHEMA: https
          GEOLITE_LICENSE_KEY: "{{ shlink_geolite_license_key }}"

    - name: Setup proxy site {{ shlink_domain_name }}
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ shlink_domain_name }}"
        proxy_port: "{{ shlink_host_port }}"

    - name: Check if prosody data dir exists
      ansible.builtin.stat:
        path: "{{ prosody_data_dir }}"
      register: prosody_dir

    - name: Fail if prosody data dir does not exist
      ansible.builtin.fail:
        msg: "Prosody data dir is missing, please restore from the backup!"
      when: not prosody_dir.stat.exists

    - name: Ensure container for prosody XMPP server is running
      docker_container:
        name: prosody
        image: netz39/prosody:0.11
        pull: true
        state: started
        detach: true
        restart_policy: unless-stopped
        ports:
          # The container offers more ports, depending on the actual prosody configuration
          - "5222:5222"  # xmpp-client
          - "5269:5269"  # xmpp-server
        volumes:
          - "{{ prosody_data_dir }}/etc/prosody:/etc/prosody:rw"
          - "{{ prosody_data_dir }}/var/lib/prosody:/var/lib/prosody:rw"
          - "{{ prosody_data_dir }}/var/log/prosody:/var/log/prosody:rw"
          - "{{ dehydrated_certs_dir }}/{{ prosody_domain_name }}:/var/lib/dehydrated/certs/{{ prosody_domain_name }}:ro"

    - name: Ensure container for static XMPP website is running
      docker_container:
        name: jabber-static-website
        image: joseluisq/static-web-server:2.13.1
        pull: true
        state: started
        detach: true
        restart_policy: unless-stopped
        env:
          SERVER_PORT: "80"
          SERVER_ROOT: "/public"
        ports:
          - "127.0.0.1:{{ jabber_host_port }}:80"
        volumes:
          - "{{ prosody_data_dir }}/var/www:/public:ro"

    - name: Setup proxy site {{ prosody_domain_name }}
      # points to the static website for now
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ prosody_domain_name }}"
        proxy_port: "{{ jabber_host_port }}"

    - name: Check if hedgedoc data dir exists
      ansible.builtin.stat:
        path: "{{ data_dir }}/hedgedoc"
      register: hedgedoc_dir

    - name: Fail if hedgedoc data dir does not exist
      ansible.builtin.fail:
        msg: "HedgeDoc data dir is missing, please restore from the backup!"
      when: not hedgedoc_dir.stat.exists

    - name: Ensure the hedgedoc directories exist
      file:
        path: "{{ item.path }}"
        mode: "{{ item.mode }}"
        state: directory
      with_items:
        - path: "{{ data_dir }}/hedgedoc/data/database"
          mode: "0700"
        - path: "{{ data_dir }}/hedgedoc/data/uploads"
          mode: "0755"

    - name: Setup docker network
      # Internal network: the database is only reachable by containers on this network
      docker_network:
        name: hedgedocnet
        state: present
        internal: true

    - name: Install HedgeDoc database container
      docker_container:
        name: hedgedocdb
        image: "postgres:11.6-alpine"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        env:
          POSTGRES_USER: "hedgedoc"
          POSTGRES_PASSWORD: "{{ hedgedoc_postgres_password }}"
          POSTGRES_DB: "hedgedoc"
        volumes:
          - "{{ data_dir }}/hedgedoc/data/database:/var/lib/postgresql/data"
        networks:
          - name: hedgedocnet

    - name: Ensure container for hedgedoc is running
      docker_container:
        name: hedgedoc
        image: "{{ hedgedoc_image }}"
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ hedgedoc_host_port }}:3000"
        restart_policy: unless-stopped
        env:
          NODE_ENV: "production"
          CMD_PROTOCOL_USESSL: "true"
          CMD_DOMAIN: "pad.n39.eu"
          CMD_URL_ADDPORT: "false"
          CMD_DB_HOST: "hedgedocdb"
          CMD_DB_PORT: "5432"
          CMD_DB_DIALECT: "postgres"
          CMD_DB_DATABASE: "hedgedoc"
          CMD_DB_USERNAME: "hedgedoc"
          CMD_DB_PASSWORD: "{{ hedgedoc_postgres_password }}"
        volumes:
          - "{{ data_dir }}/hedgedoc/data/uploads:/hedgedoc/public/uploads"
        networks:
          - name: hedgedocnet

    - name: Setup proxy site pad.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: pad.n39.eu
        proxy_port: "{{ hedgedoc_host_port }}"

    - name: Ensure the influxdb directories exist
      file:
        path: "{{ item }}"
        mode: "0700"
        state: directory
      with_items:
        - "{{ data_dir }}/influxdb"

    - name: Ensure container for influxdb is running
      docker_container:
        name: influxdb
        image: "{{ influxdb_image }}"
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ influxdb_host_port }}:8086"
        restart_policy: unless-stopped
        env:
          DOCKER_INFLUXDB_INIT_USERNAME: "{{ influxdb_init_username }}"
          DOCKER_INFLUXDB_INIT_PASSWORD: "{{ influxdb_init_password }}"
          DOCKER_INFLUXDB_INIT_ORG: "Netz39"
          DOCKER_INFLUXDB_INIT_BUCKET: "default"
        volumes:
          - "{{ data_dir }}/influxdb:/var/lib/influxdb2"

    - name: Setup proxy site influx.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: influx.n39.eu
        proxy_port: "{{ influxdb_host_port }}"

    # Expected setup for the data directory:
    #   file:      configuration.yml
    #   directory: mysql
    #   directory: files
    #   directory: themes
    - name: Check if redmine data dir exists
      ansible.builtin.stat:
        path: "{{ data_dir }}/redmine"
      register: redmine_dir

    - name: Fail if redmine data dir does not exist
      ansible.builtin.fail:
        msg: "Redmine data dir is missing, please restore from the backup!"
      when: not redmine_dir.stat.exists

    - name: Setup Redmine docker network
      # Internal network: the database is only reachable by containers on this network
      docker_network:
        name: redminenet
        state: present
        internal: true

    - name: Setup Redmine MySQL container
      docker_container:
        name: redminedb
        image: "{{ redmine_mysql_image }}"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        env:
          MYSQL_ROOT_PASSWORD: "{{ redmine_database_password }}"
          MYSQL_DATABASE: "{{ redmine_database }}"
        volumes:
          - "{{ data_dir }}/redmine/mysql:/var/lib/mysql"
        networks:
          - name: redminenet

    - name: Setup Redmine container
      docker_container:
        name: redmine
        image: "{{ redmine_image }}"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        ports:
          - "127.0.0.1:{{ redmine_host_port }}:3000"
        env:
          REDMINE_DB_MYSQL: redminedb
          REDMINE_DB_PASSWORD: "{{ redmine_database_password }}"
        volumes:
          - "{{ data_dir }}/redmine/configuration.yml:/usr/src/redmine/config/configuration.yml"
          - "{{ data_dir }}/redmine/files:/usr/src/redmine/files"
          - "{{ data_dir }}/redmine/themes:/usr/src/redmine/public/themes"
        networks:
          - name: redminenet

    - name: Setup proxy site redmine.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: redmine.n39.eu
        proxy_port: "{{ redmine_host_port }}"

    - name: Ensure the uptime-kuma directories exist
      file:
        path: "{{ item }}"
        mode: "0755"
        state: directory
      with_items:
        - "{{ data_dir }}/uptime-kuma"

    - name: Ensure container for uptime-kuma is running
      docker_container:
        name: uptime-kuma
        image: "louislam/uptime-kuma:1"
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ uptimekuma_host_port }}:3001"
        restart_policy: unless-stopped
        volumes:
          - "{{ data_dir }}/uptime-kuma:/app/data"

    - name: Setup proxy site uptime.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: uptime.n39.eu
        proxy_port: "{{ uptimekuma_host_port }}"

  handlers: