---
# Playbook for host pottwal.n39.eu: configures Apache reverse proxying,
# Let's Encrypt certificates (via dehydrated), and the docker containers
# set up by the tasks below.
- hosts: pottwal.n39.eu
  become: true

  roles:
    # role 'docker_setup' applied through group 'docker_host'
    - role: apache
    - role: apache_letsencrypt  # Uses configuration from dehydrated setup
    - role: ansible-role-dehydrated
      vars:
        dehydrated_contact_email: "{{ server_admin }}"
        # Certificate domains; each entry may carry alternate names and a
        # deploy hook that runs after a certificate is (re)issued.
        dehydrated_domains:
          - name: gitea.n39.eu
          - name: uritools.n39.eu
          - name: uritools-api.n39.eu
          - name: "{{ shlink_domain_name }}"
          - name: pad.n39.eu
          - name: "{{ prosody_domain_name }}"
            alternate_names:
              - conference.jabber.n39.eu
            # Import the fresh certificate into the prosody container
            deploy_cert_hook: "docker exec prosody prosodyctl --root cert import ${DOMAIN} /var/lib/dehydrated/certs"
          - name: redmine.n39.eu
          - name: "{{ influxdb_domain_name }}"
          - name: uptime.n39.eu
          - name: "{{ grafana_domain_name }}"
          - name: "{{ homebox_domain_name }}"
    - role: penguineer.dehydrated_cron
    - role: dd24_dyndns_cron
      # variables are set in the inventory
    - role: cleanuri
      vars:
        cleanuri_ui_domain: uritools.n39.eu
        cleanuri_ui_host_port: 8090
        cleanuri_api_domain: uritools-api.n39.eu
        cleanuri_api_host_port: 8091
        # RabbitMQ setup can be found in the inventory

  tasks:

    # Guard: abort before touching forgejo if its data directory is absent
    # (per the fail message, it must be restored from backup, not recreated).
    - name: Check if forgejo data dir exists
      ansible.builtin.stat:
        path: "{{ data_dir }}/forgejo"
      register: forgejo_dir
    - name: Fail if forgejo data dir does not exist
      ansible.builtin.fail:
        msg: "Forgejo data dir is missing, please restore from the backup!"
      when: not forgejo_dir.stat.exists

    # If port 2222 is changed here, it must also be adapted
    # in the forgejo config file (see application volume)!!
    - name: Setup the docker container for gitea
      community.docker.docker_container:
        name: forgejo
        image: "codeberg.org/forgejo/forgejo:1.19"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        ports:
          # quoted: unquoted digit:digit mappings are parsed as
          # sexagesimal integers by YAML 1.1 loaders
          - "127.0.0.1:{{ forgejo_host_port }}:3000"
          - "2222:2222"
        env:
          TZ: "{{ timezone }}"
          APP_NAME: "Netz39 Git"
          RUN_MODE: "prod"
          SSH_DOMAIN: "gitea.n39.eu"
          SSH_PORT: "2222"
          SSH_START_SERVER: "false"
          ROOT_URL: "https://gitea.n39.eu"
          DISABLE_REGISTRATION: "true"
          USER_UID: "1000"
          USER_GID: "1000"
        volumes:
          - "{{ data_dir }}/forgejo:/data:rw"

    # Publish forgejo through the Apache reverse proxy
    - name: Setup proxy site gitea.n39.eu
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "gitea.n39.eu"
        proxy_port: "{{ forgejo_host_port }}"

    - name: Ensure apt-cacher container is running
      community.docker.docker_container:
        name: apt_cacher_ng
        image: "mrtux/apt-cacher-ng"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        ports:
          # quoted to avoid YAML 1.1 sexagesimal-int parsing
          - "3142:3142"
        env:
          TZ: "{{ timezone }}"


    - name: Ensure container for shlink is running
      community.docker.docker_container:
        name: shlink
        image: shlinkio/shlink:2.6.2
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ shlink_host_port }}:8080"
        restart_policy: unless-stopped
        env:
          TZ: "{{ timezone }}"
          SHORT_DOMAIN_HOST: "{{ shlink_domain_name }}"
          SHORT_DOMAIN_SCHEMA: https
          GEOLITE_LICENSE_KEY: "{{ shlink_geolite_license_key }}"

    - name: Setup proxy site {{ shlink_domain_name }}
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ shlink_domain_name }}"
        proxy_port: "{{ shlink_host_port }}"

    # Guard: prosody data must already exist (restore from backup otherwise).
    - name: Check if prosody data dir exists
      ansible.builtin.stat:
        path: "{{ prosody_data_dir }}"
      register: prosody_dir
    - name: Fail if prosody data dir does not exist
      ansible.builtin.fail:
        msg: "prosody data dir is missing, please restore from the backup!"
      when: not prosody_dir.stat.exists

    # File modes are quoted: a bare 0755 is read as the integer 493
    # by the YAML loader, which Ansible then misinterprets.
    - name: Ensure prosody config dir exists
      ansible.builtin.file:
        path: "{{ prosody_config_dir }}"
        state: directory
        mode: "0755"

    - name: Ensure prosody certs dir exists
      ansible.builtin.file:
        path: "{{ prosody_config_dir }}/certs"
        state: directory
        # group 'ssl-cert' inside of the container
        group: "101"
        mode: "0750"

    - name: Ensure prosody conf.avail dir exists
      ansible.builtin.file:
        path: "{{ prosody_config_dir }}/conf.avail"
        state: directory
        mode: "0755"

    # Renamed from the duplicated "certs dir" task name: this one
    # actually creates conf.d.
    - name: Ensure prosody conf.d dir exists
      ansible.builtin.file:
        path: "{{ prosody_config_dir }}/conf.d"
        state: directory
        mode: "0755"

    - name: Ensure prosody main config file is in place
      ansible.builtin.template:
        src: "templates/prosody/prosody.cfg.lua.j2"
        dest: "{{ prosody_config_dir }}/prosody.cfg.lua"
        mode: "0644"
      notify:
        - Restart prosody

    - name: "Ensure prosody config file is in place: {{ prosody_domain_name }}"
      ansible.builtin.copy:
        src: "files/prosody/{{ prosody_domain_name }}.cfg.lua"
        dest: "{{ prosody_config_dir }}/conf.avail/{{ prosody_domain_name }}.cfg.lua"
        mode: "0644"
      notify:
        - Restart prosody

    # Debian-style conf.avail/conf.d layout: enable the host config
    # by symlinking it into conf.d.
    - name: "Ensure prosody config symlink exists: {{ prosody_domain_name }}"
      ansible.builtin.file:
        src: "../conf.avail/{{ prosody_domain_name }}.cfg.lua"
        dest: "{{ prosody_config_dir }}/conf.d/{{ prosody_domain_name }}.cfg.lua"
        state: link
      notify:
        - Restart prosody

    - name: Ensure container for prosody XMPP server is running
      community.docker.docker_container:
        name: prosody
        image: netz39/prosody:0.11
        pull: true
        state: started
        detach: true
        restart_policy: unless-stopped
        ports:
          # container offers more ports, depends on actual prosody configuration
          # (quoted to avoid YAML 1.1 sexagesimal-int parsing)
          - "5222:5222"   # xmpp-client
          - "5269:5269"   # xmpp-server
        volumes:
          - "{{ prosody_config_dir }}:/etc/prosody:ro"
          - "{{ prosody_config_dir }}/certs:/etc/prosody/certs:rw"
          - "{{ prosody_data_dir }}/var/lib/prosody:/var/lib/prosody:rw"
          - "{{ prosody_data_dir }}/var/log/prosody:/var/log/prosody:rw"
          - "{{ dehydrated_certs_dir }}/{{ prosody_domain_name }}:/var/lib/dehydrated/certs/{{ prosody_domain_name }}:ro"
        env:
          TZ: "{{ timezone }}"


    - name: Ensure container for static XMPP website is running
      community.docker.docker_container:
        name: jabber-static-website
        image: joseluisq/static-web-server:2.14
        pull: true
        state: started
        detach: true
        restart_policy: unless-stopped
        env:
          TZ: "{{ timezone }}"
          SERVER_PORT: "80"
          SERVER_ROOT: "/public"
        ports:
          - "127.0.0.1:{{ jabber_host_port }}:80"
        volumes:
          - "{{ prosody_data_dir }}/var/www:/public:ro"

    - name: Setup proxy site {{ prosody_domain_name }}
      # point to static website for now
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ prosody_domain_name }}"
        proxy_port: "{{ jabber_host_port }}"

    # Guard: hedgedoc data must already exist (restore from backup otherwise).
    - name: Check if hedgedoc data dir exists
      ansible.builtin.stat:
        path: "{{ data_dir }}/hedgedoc"
      register: hedgedoc_dir
    - name: Fail if hedgedoc data dir does not exist
      ansible.builtin.fail:
        msg: "hedgedoc data dir is missing, please restore from the backup!"
      when: not hedgedoc_dir.stat.exists

    - name: Ensure the hedgedoc directories exist
      ansible.builtin.file:
        path: "{{ item.path }}"
        mode: "{{ item.mode }}"
        state: directory
      with_items:
        # database dir is postgres-private; uploads must be world-readable
        - path: "{{ data_dir }}/hedgedoc/data/database"
          mode: "0700"
        - path: "{{ data_dir }}/hedgedoc/data/uploads"
          mode: "0755"

    # Internal-only network so the database is reachable solely by hedgedoc
    - name: Setup docker network
      community.docker.docker_network:
        name: hedgedocnet
        state: present
        internal: true

    - name: Install HedgeDoc database container
      community.docker.docker_container:
        name: hedgedocdb
        image: "postgres:11.6-alpine"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        env:
          TZ: "{{ timezone }}"
          POSTGRES_USER: "hedgedoc"
          POSTGRES_PASSWORD: "{{ hedgedoc_postgres_password }}"
          POSTGRES_DB: "hedgedoc"
        volumes:
          - "{{ data_dir }}/hedgedoc/data/database:/var/lib/postgresql/data"
        networks:
          - name: hedgedocnet

    - name: Ensure container for hedgedoc is running
      community.docker.docker_container:
        name: hedgedoc
        image: "{{ hedgedoc_image }}"
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ hedgedoc_host_port }}:3000"
        restart_policy: unless-stopped
        env:
          TZ: "{{ timezone }}"
          NODE_ENV: "production"
          CMD_PROTOCOL_USESSL: "true"
          CMD_DOMAIN: "pad.n39.eu"
          CMD_URL_ADDPORT: "false"
          # database connection goes over the internal hedgedocnet network
          CMD_DB_HOST: "hedgedocdb"
          CMD_DB_PORT: "5432"
          CMD_DB_DIALECT: "postgres"
          CMD_DB_DATABASE: "hedgedoc"
          CMD_DB_USERNAME: "hedgedoc"
          CMD_DB_PASSWORD: "{{ hedgedoc_postgres_password }}"
        volumes:
          - "{{ data_dir }}/hedgedoc/data/uploads:/hedgedoc/public/uploads"
        networks:
          - name: hedgedocnet

    - name: Setup proxy site pad.n39.eu
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: pad.n39.eu
        proxy_port: "{{ hedgedoc_host_port }}"

    - name: Ensure the influxdb directories exist
      ansible.builtin.file:
        path: "{{ item }}"
        # quoted: a bare 0700 would be loaded as the integer 448
        mode: "0700"
        state: directory
      with_items:
        - "{{ data_dir }}/influxdb"
        - "{{ data_dir }}/influxdb/data"
        - "{{ data_dir }}/influxdb/cfg"

    - name: Ensure container for influxdb is running
      community.docker.docker_container:
        name: influxdb
        image: "{{ influxdb_image }}"
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ influxdb_host_port }}:8086"
        restart_policy: unless-stopped
        env:
          TZ: "{{ timezone }}"
          DOCKER_INFLUXDB_INIT_USERNAME: "{{ influxdb_init_username }}"
          DOCKER_INFLUXDB_INIT_PASSWORD: "{{ influxdb_init_password }}"
          DOCKER_INFLUXDB_INIT_ORG: "{{ influxdb_org }}"
          DOCKER_INFLUXDB_INIT_BUCKET: default
        volumes:
          - "{{ data_dir }}/influxdb/data:/var/lib/influxdb2"
          - "{{ data_dir }}/influxdb/cfg:/etc/influxdb2"

    - name: Setup proxy site {{ influxdb_domain_name }}
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ influxdb_domain_name }}"
        proxy_port: "{{ influxdb_host_port }}"

    # Expected setup for the data directory
    #   file: configuration.yml
    #   directory: mysql
    #   directory: files
    #   directory: themes
    # Guard: refuse to run if the redmine data dir is missing
    # (restore from backup instead of starting empty).
    - name: Check if redmine data dir exists
      ansible.builtin.stat:
        path: "{{ data_dir }}/redmine"
      register: redmine_dir
    - name: Fail if redmine data dir does not exist
      ansible.builtin.fail:
        msg: "Redmine data dir is missing, please restore from the backup!"
      when: not redmine_dir.stat.exists

    # Internal-only network so the database is reachable solely by redmine
    - name: Setup Redmine docker network
      community.docker.docker_network:
        name: redminenet
        state: present
        internal: true

    - name: Setup Redmine MySQL container
      community.docker.docker_container:
        name: redminedb
        image: "{{ redmine_mysql_image }}"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        env:
          TZ: "{{ timezone }}"
          MYSQL_ROOT_PASSWORD: "{{ redmine_database_password }}"
          MYSQL_DATABASE: "{{ redmine_database }}"
        volumes:
          - "{{ data_dir }}/redmine/mysql:/var/lib/mysql"
        networks:
          - name: redminenet

    - name: Setup Redmine container
      community.docker.docker_container:
        name: redmine
        image: "{{ redmine_image }}"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: true
        ports:
          - "127.0.0.1:{{ redmine_host_port }}:3000"
        env:
          TZ: "{{ timezone }}"
          # database connection goes over the internal redminenet network
          REDMINE_DB_MYSQL: redminedb
          REDMINE_DB_PASSWORD: "{{ redmine_database_password }}"
        volumes:
          - "{{ data_dir }}/redmine/configuration.yml:/usr/src/redmine/config/configuration.yml"
          - "{{ data_dir }}/redmine/files:/usr/src/redmine/files"
          - "{{ data_dir }}/redmine/themes:/usr/src/redmine/public/themes"
        networks:
          - name: redminenet

    - name: Setup proxy site redmine.n39.eu
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: redmine.n39.eu
        proxy_port: "{{ redmine_host_port }}"

    - name: Ensure the uptime-kuma directories exist
      ansible.builtin.file:
        path: "{{ item }}"
        mode: "0755"
        state: directory
      with_items:
        - "{{ data_dir }}/uptime-kuma"

    - name: Ensure container for uptime-kuma is running
      community.docker.docker_container:
        name: uptime-kuma
        image: "louislam/uptime-kuma:1"
        pull: true
        state: started
        detach: true
        ports:
          - "127.0.0.1:{{ uptimekuma_host_port }}:3001"
        restart_policy: unless-stopped
        env:
          TZ: "{{ timezone }}"
        volumes:
          - "{{ data_dir }}/uptime-kuma:/app/data"

    - name: Setup proxy site uptime.n39.eu
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: uptime.n39.eu
        proxy_port: "{{ uptimekuma_host_port }}"

    - name: Ensure the grafana directories exist
      ansible.builtin.file:
        path: "{{ item.path }}"
        owner: "{{ item.owner | default('root') }}"
        mode: "{{ item.mode }}"
        state: directory
      with_items:
        - path: "{{ data_dir }}/grafana"
          mode: "0755"
        - path: "{{ data_dir }}/grafana/data"
          # NOTE(review): 472 is presumably the grafana container UID — confirm
          owner: "472"
          mode: "0755"
        - path: "{{ data_dir }}/grafana/etc"
          mode: "0755"

    - name: Ensure container for grafana is running
      community.docker.docker_container:
        name: grafana
        # NOTE(review): image is unpinned ("latest"); with pull: true every
        # run may upgrade grafana — consider pinning a version tag
        image: "grafana/grafana"
        pull: true
        state: started
        detach: true
        restart_policy: unless-stopped
        ports:
          - "127.0.0.1:{{ grafana_host_port }}:3000"
        volumes:
          - "{{ data_dir }}/grafana/data:/var/lib/grafana"
          - "{{ data_dir }}/grafana/etc:/etc/grafana"
        env:
          TZ: "{{ timezone }}"
          GF_SECURITY_ADMIN_PASSWORD: "{{ grafana_admin_password }}"
          GF_USERS_ALLOW_SIGN_UP: "false"
          GF_INSTALL_PLUGINS: "flant-statusmap-panel,ae3e-plotly-panel"

    # Task name uses the domain variable, matching the sibling proxy tasks
    - name: Setup proxy site {{ grafana_domain_name }}
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ grafana_domain_name }}"
        proxy_port: "{{ grafana_host_port }}"
        proxy_preserve_host: "On"

    - name: Ensure the homebox directories exist
      ansible.builtin.file:
        path: "{{ item.path }}"
        owner: "{{ item.owner | default('root') }}"
        mode: "{{ item.mode }}"
        state: directory
      with_items:
        - path: "{{ data_dir }}/homebox"
          mode: "0755"
        - path: "{{ data_dir }}/homebox/data"
          mode: "0755"

    - name: Ensure container for homebox is running
      community.docker.docker_container:
        name: homebox
        # NOTE(review): image is unpinned ("latest"); with pull: true every
        # run may upgrade homebox — consider pinning a version tag
        image: "ghcr.io/hay-kot/homebox"
        pull: true
        state: started
        detach: true
        restart_policy: unless-stopped
        ports:
          - "127.0.0.1:{{ homebox_host_port }}:7745"
        volumes:
          - "{{ data_dir }}/homebox/data:/data"
        env:
          TZ: "{{ timezone }}"
          HBOX_LOG_LEVEL: "info"
          HBOX_LOG_FORMAT: "text"
          HBOX_WEB_MAX_UPLOAD_SIZE: "10"

    - name: Setup proxy site {{ homebox_domain_name }}
      ansible.builtin.include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ homebox_domain_name }}"
        proxy_port: "{{ homebox_host_port }}"
        proxy_preserve_host: "On"

  handlers:
    # Triggered by prosody config changes; restarts the running container
    - name: Restart prosody
      community.docker.docker_container:
        name: prosody
        state: started
        restart: true