Compare commits

7 commits

39 changed files with 657 additions and 547 deletions

@@ -49,7 +49,7 @@ To set up a new HTTPS vhost, the following steps need to be taken:
1. Select a domain (for internal services we use sub-domains of `.n39.eu`).
2. Create an external CNAME from this domain to `dyndns.n39.eu`.
3. Create an internal DNS entry in the [Descartes DNS config](https://git.n39.eu/Netz39_Admin/config.descartes/src/branch/prepare/dns_dhcp.txt). This is usually an alias on an existing server.
3. Create an internal DNS entry in the [Descartes DNS config](https://gitea.n39.eu/Netz39_Admin/config.descartes/src/branch/prepare/dns_dhcp.txt). This is usually an alias on an existing server.
4. Add the entry to the [holmium playbook](holmium.yml).
5. Set up Dehydrated and vhost on the target host, e.g. using `setup_http_site_proxy`.
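In the playbooks touched by this diff, steps 4 and 5 boil down to a `dehydrated_domains` entry plus an `include_role` call on `setup_http_site_proxy`. A minimal sketch of such a play, following the same pattern as the pottwal and radon playbooks below; the host `example-host.n39.eu`, the domain `example.n39.eu`, and port `8080` are placeholders, not entries from this repository:

```yaml
- hosts: example-host.n39.eu  # placeholder host
  become: true
  vars:
    dehydrated_contact_email: "{{ server_admin }}"
    dehydrated_domains:
      # Domains for which Dehydrated maintains certificates
      - name: example.n39.eu
  roles:
    # Renews certificates via cron, as the other playbooks here do
    - role: penguineer.dehydrated_cron
  tasks:
    - name: Setup proxy site example.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: example.n39.eu
        proxy_port: 8080  # placeholder port of the proxied service
```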

device-cisco-2960-1.yml (new file, 97 lines)

@@ -0,0 +1,97 @@
---
- hosts: cisco-2960-1.n39.eu
become: true
tasks:
- name: configure login banner
cisco.ios.ios_banner:
banner: login
text: "Documentation here: https://wiki.netz39.de/internal:inventory:network:2960s-24td-l"
state: present
- name: configure vlans
cisco.ios.ios_vlans:
config:
- name: lan
vlan_id: 4
state: active
shutdown: disabled
- name: wan
vlan_id: 5
state: active
shutdown: disabled
- name: service
vlan_id: 7
state: active
shutdown: disabled
- name: legacy
vlan_id: 8
state: active
shutdown: disabled
- name: dmz
vlan_id: 9
state: active
shutdown: disabled
- name: ffmd-client
vlan_id: 11
state: active
shutdown: disabled
state: merged
- name: configure port assignment
cisco.ios.ios_l2_interfaces:
config:
# UPS
- name: Gi1/0/6
mode: access
access:
vlan: 1
# beaker ipmi
- name: Gi1/0/9
mode: access
access:
vlan: 1
# Patch panel 1, switch for the lending list
- name: Gi1/0/13
mode: trunk
trunk:
allowed_vlans: 1,4,5,7,8,11
native_vlan: 4
# Patch panel 2 - Raspberry Pi Platon
- name: Gi1/0/15
mode: access
access:
vlan: 4
# Patch panel 6 - access point in Hempels Zimmer
- name: Gi1/0/17
mode: access
access:
vlan: 4
# Milling machine PC (FräsPC)
- name: Gi1/0/19
mode: access
access:
vlan: 4
# Patch panel 4 - switch at the workbench
- name: Gi1/0/20
mode: trunk
trunk:
allowed_vlans: 1,4,5,7,8,11
native_vlan: 4
# uplink descartes
- name: Gi1/0/25
mode: trunk
trunk:
allowed_vlans: 1-11
native_vlan: 1
# server marx
- name: Gi1/0/26
mode: trunk
trunk:
allowed_vlans: 1-11
native_vlan: 1
state: merged
- name: Save running to startup when modified
cisco.ios.ios_config:
save_when: modified

@@ -1,17 +0,0 @@
[Unit]
Description=Grafana Kiosk
After=network.target
Wants=network.target
[Service]
User=root
ExecStart=/usr/local/bin/kiosk.sh
Restart=always
PIDFile=/run/kiosk.pid
ExecStop=/bin/kill -s SIGTERM $MAINPID
[Install]
WantedBy=multi-user.target

@@ -1,7 +1,5 @@
#!/bin/sh
PATH='/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/games:/usr/games'
echo 'set PCM volume'
sudo amixer set "PCM" "70%"
echo 'start i2c-foo'
sudo modprobe i2c_dev
sudo modprobe i2c_bcm2708

@@ -1,9 +0,0 @@
---
- hosts: k3s
become: true
tasks:
- name: Ensure nfs-common is installed on k3s VMs
ansible.builtin.apt:
pkg: nfs-common
state: present

@@ -10,8 +10,6 @@ users:
ssh_pub:
- !unsafe >
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDVZPAE3XE8Ek1Ji4sCIHxLVx+bi2qpsTSsYhBqtYysnFn9AHJj14BR59D0Si05sfVkmL4OQoo7Q98oIxy33PgtqoUfgXk9dc7dlsye3t/gsAb25ABnqG/ZYe65nZLN7BzRM1/QZIbd6sSu6eXrNFCh0ikB5se4zgVkDO8t6h2dnz4FvTuIM2Bi/PnIJTqb8+uLQE1vS3A7tTx100ZKXxr81dlo2Y1JBP6WrS1W1IyFiG6wofl2XTY02ssyoENQyR89lLMJYKvm5xlhL/L69gtMsqIX9UBQFk8Rpq04ZIwN6b0K4R142GZvxdJNdQULgtI3gPkKgH7FDoFsRHNA6b/9 adahl@ada
- !unsafe >
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDb5j4SlVDkK/CE/viZA5M/VquKm0DaMB6h5yR9ZWf7oW2h/q5tPQr5Kzatt+uCF++1eCOfoq6BR/NV01KkVdyTMemA8LMZwuf4uUzTlfnoXO4eGP0+d4aGzSuE08gak8c0iYF5zzzJGSKVIZ7qQXAmAH5guJxdRltpJlFbnYY6Plo1nxmluSAAh8qPSBQhZy+ja05ZpXct6+IeXHDLJ9ia5x71hAbEzKJXafVukL/Qt6Gr80snW1OuVzBpDs5/O2taKNV4a3dAzM4cNb0xGbhNogiuZD5IPHjkbsiOifBT+i48CBOasSWO9tnNZ6X/kDXxizoo4gB1rWOVvPE8SXXbKSxus48AG0MEKh0XGB7z7klCxDWITn1JpN3x8/vbG9Y02/QlVdqdTuIq7fUfrQz3hipR2DMXuGnMkwkR80XXkQziuBP6UG3Meh2wZ0SxIex3JgVsZh4gxvIvNxuU9iEcpgEFhGFvQwxbZ+nWYYe0j//OzfKQpod/D03tx7W6SXM= adahl@ada-pc
- !unsafe >
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDvczlb1+9d1BjuLk5ZcQt2Z0Dh61Vg91i47tM48CN2koJ4I/9vgN37l6mnr383zD8kQkXDGmCYpXOa48WocyyUuP3h75DCjANYcWOsohQfFu2F1ZOiiVCGduDntzS2nbZEF2W3nZNLQ6/dKKEeaSxu5RjKflkWakghkMt3H4KN20bxzYzHQMLhRYFEGHpskOqeaXKPkqqEP+u5kToINtmXwegCvQFnlx4fNrysFII79buBNlcLsO1X4ABucVMYT/OJnBpJEfEcNFUKrJZRGgM8aDbUpkV9LRY2lywvoKJhiRMc7x7kK0LWOTdPJri+SJhW6fEW4JKCRTSHVN8OS8S/ alex@buffy
- !unsafe >
@@ -55,24 +53,6 @@ users:
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHasp1Q/HJURndKnNRP5TJqJVHPuN9G/9uHdaNGhI8yi mg@mg-Swift-SF314-52G
sudo: yes
docker: yes
- logname: "timo"
viewname: "Timo Herrmann"
email: "timo@netz39.de"
ssh_pub:
- !unsafe >
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILKhKHCPibswu2p6UQHKsBSqGaXzMFM+oMX0XEWsxCIc timo@Space-Lap
- !unsafe >
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMJoS7nsLLzSAsZA4us2/7JaQhgRjj/BY+LOpDQnfy8u timo@mac
sudo: yes
docker: yes
- logname: "JensWH"
viewname: "Jens Winter-Hübenthal"
email: "jens.winter@gmail.com"
ssh_pub:
- !unsafe >
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIII4FS2sROKs2nIW8uzDuWmj8q127UoljtkVMthY8g// jens@work-lenovo
sudo: yes
docker: yes
# Data for DD24 dyndns updates
dyndns_domain: "dyndns.n39.eu"

@@ -8,6 +8,7 @@
roles:
tasks:
- name: Install packages needed for the system
# This is a list of all packages,
@@ -15,9 +16,15 @@
ansible.builtin.apt:
state: present
name:
- mosquitto-clients
- fbi
# This is needed for the user-executed tasks
- acl
# Regular packages
- lightdm
- accountsservice
- unclutter
- lxde
- chromium-browser
- rng-tools
- name: Remove the screensavers
ansible.builtin.apt:
@@ -46,23 +53,50 @@
### Kiosk setup
#
# https://github.com/grafana/grafana-kiosk
- name: Ensure kiosk user is there
ansible.builtin.user:
name: "{{ kiosk_user }}"
groups: audio,plugdev,input,netdev
append: yes
- name: Install Kiosk script
ansible.builtin.template:
src: templates/hobbes/kiosk.sh.j2
dest: /usr/local/bin/kiosk.sh
owner: root
group: root
- name: Create bin directory
file:
path: "/home/{{ kiosk_user }}/bin"
owner: "{{ kiosk_user }}"
mode: '0755'
state: directory
- name: Download grafana-kiosk
ansible.builtin.get_url:
url: "https://github.com/grafana/grafana-kiosk/releases/download/{{ kiosk_software_version }}/grafana-kiosk.linux.{{ kiosk_software_arch }}"
dest: "/home/{{ kiosk_user }}/bin/grafana-kiosk"
mode: '0755'
force: no
- name: Setup autologin in lightdm
ansible.builtin.blockinfile:
path: /etc/lightdm/lightdm.conf
block: |
[Seat:seat0]
autologin-user = pi
autologin-user-timeout = 0
autologin-in-background = False
- name: Remove autostart
# None of the things in autostart are needed or wanted
ansible.builtin.file:
path: /etc/xdg/lxsession/LXDE/autostart
state: absent
- name: Add systemd service
ansible.builtin.copy:
src: files/hobbes/grafana-kiosk.service
ansible.builtin.template:
src: templates/hobbes/grafana-kiosk.service.j2
dest: /etc/systemd/system/grafana-kiosk.service
owner: root
group: root
@@ -74,4 +108,12 @@
enabled: true
state: started
- name: Set default systemd target to graphical
ansible.builtin.file:
src: /lib/systemd/system/graphical.target
dest: /etc/systemd/system/default.target
state: link
force: yes
handlers:

@@ -15,7 +15,7 @@
- name: entities-validation.svc.n39.eu
- server: pottwal
hosts:
- name: git.n39.eu
- name: gitea.n39.eu
- name: redmine.n39.eu
- name: uritools.n39.eu
- name: uritools-api.n39.eu
@@ -37,5 +37,5 @@
local: true
- name: pwr-meter-pulse-gw-19i.svc.n39.eu
local: true
- name: labelprinter.n39.eu
- name: brotherql-web.n39.eu
local: true

@@ -9,6 +9,7 @@
docker_ip_ranges: ["172.16.0.0/12", "192.168.0.0/16"]
openldap_image_version: 1.5.0
openldap_data: "{{ data_dir }}/openldap"
openldap_domain: "ldap.n39.eu"
ldap_domain: "netz39.de"
@@ -27,7 +28,7 @@
dehydrated_contact_email: "{{ server_admin }}"
dehydrated_domains:
- name: entities-validation.svc.n39.eu
- role: ansible-role-dehydrated_cron
- role: penguineer.dehydrated_cron
tasks:
@@ -51,7 +52,7 @@
- name: Ensure container for openLDAP is running.
docker_container:
name: openLDAP
image: osixia/openldap:1.5.0
image: "osixia/openldap:{{ openldap_image_version }}"
detach: yes
state: started
restart_policy: unless-stopped
@@ -116,7 +117,7 @@
- name: Ensure container for entities validation service is running
docker_container:
name: entities_validation_svc
image: netz39/entities_validation_svc:v1.0.1
image: netz39/entities_validation_svc:v1.0.0
pull: true
state: started
detach: yes

@@ -204,6 +204,7 @@
become: yes
become_user: "{{ gatekeeper_user }}"
ansible.builtin.shell:
warn: false
chdir: "/home/{{ gatekeeper_user }}/mqtt-tools"
cmd: |
mkdir build

@@ -1,14 +0,0 @@
---
- hosts: plumbum.n39.eu
become: true
roles:
- role: nfs-host
vars:
nfs_host_exports:
- directory: "/srv/nfs/backup"
hosts: "*.n39.eu"
options: rw,sync,no_subtree_check,no_root_squash
- directory: "/srv/nfs/ephemeral"
hosts: "*.n39.eu"
options: rw,sync,no_subtree_check,no_root_squash

@@ -10,22 +10,22 @@
vars:
dehydrated_contact_email: "{{ server_admin }}"
dehydrated_domains:
- name: "{{ forgejo_domain_name }}"
- name: gitea.n39.eu
- name: uritools.n39.eu
- name: uritools-api.n39.eu
- name: "{{ shlink_domain_name }}"
- name: "{{ hedgedoc_domain_name }}"
- name: pad.n39.eu
- name: "{{ prosody_domain_name }}"
alternate_names:
- conference.jabber.n39.eu
deploy_cert_hook: "docker exec prosody prosodyctl --root cert import ${DOMAIN} /var/lib/dehydrated/certs"
- name: "{{ redmine_domain_name }}"
- name: redmine.n39.eu
- name: "{{ influxdb_domain_name }}"
- name: "{{ uptimekuma_domain_name }}"
- name: uptime.n39.eu
- name: "{{ grafana_domain_name }}"
- name: "{{ homebox_domain_name }}"
- name: spaceapi.n39.eu
- role: ansible-role-dehydrated_cron
- role: penguineer.dehydrated_cron
- role: dd24_dyndns_cron
# variables are set in the inventory
- role: cleanuri
@@ -42,53 +42,49 @@
ansible.builtin.stat:
path: "{{ data_dir }}/forgejo"
register: forgejo_dir
tags: ["forgejo"]
- name: Fail if forgejo data dir does not exist
ansible.builtin.fail:
msg: "Forgejo data dir is missing, please restore from the backup!"
when: not forgejo_dir.stat.exists
tags: ["forgejo"]
# If port 2222 is changed here, it must also be adapted
# in the forgejo config file (see application volume)!!
- name: Setup the docker container for forgejo
- name: Setup the docker container for gitea
docker_container:
name: forgejo
image: "{{ forgejo_image }}"
image: "codeberg.org/forgejo/forgejo:1.19"
pull: true
state: started
restart_policy: unless-stopped
detach: yes
ports:
- 127.0.0.1:{{ forgejo_host_port }}:3000
- "{{ forgejo_ssh_port }}:2222"
- 2222:2222
env:
TZ: "{{ timezone }}"
APP_NAME: "Netz39 Git"
RUN_MODE: "prod"
SSH_DOMAIN: "{{ forgejo_domain_name }}"
SSH_DOMAIN: "gitea.n39.eu"
SSH_PORT: "2222"
SSH_START_SERVER: "false"
ROOT_URL: "https://{{ forgejo_domain_name }}"
ROOT_URL: "https://gitea.n39.eu"
DISABLE_REGISTRATION: "true"
USER_UID: "1000"
USER_GID: "1000"
volumes:
- "{{ data_dir }}/forgejo:/data:rw"
tags: ["forgejo"]
- name: Setup proxy site "{{ forgejo_domain_name }}"
- name: Setup proxy site gitea.n39.eu
include_role:
name: setup_http_site_proxy
vars:
site_name: "{{ forgejo_domain_name }}"
site_name: "gitea.n39.eu"
proxy_port: "{{ forgejo_host_port }}"
tags: ["forgejo"]
- name: Ensure apt-cacher container is running
docker_container:
name: apt_cacher_ng
image: mrtux/apt-cacher-ng:latest
image: "mrtux/apt-cacher-ng"
pull: true
state: started
restart_policy: unless-stopped
@@ -98,46 +94,11 @@
env:
TZ: "{{ timezone }}"
- name: Setup docker network
docker_network:
name: shlinknet
state: present
internal: true
tags:
- shlink
- name: Ensure shlink data dir exists
ansible.builtin.file:
path: "{{ data_dir }}/shlink/data/database"
state: directory
mode: 0755
tags:
- shlink
- name: Ensure shlink database container is running
docker_container:
name: shlinkdb
image: postgres:16.3-alpine
pull: true
state: started
restart_policy: unless-stopped
detach: yes
env:
TZ: "{{ timezone }}"
POSTGRES_USER: "shlink"
POSTGRES_PASSWORD: "{{ shlink_postgres_password }}"
POSTGRES_DB: "shlink"
volumes:
- "{{ data_dir }}/shlink/data/database:/var/lib/postgresql/data"
networks:
- name: shlinknet
tags:
- shlink
- name: Ensure container for shlink is running
docker_container:
name: shlink
image: "{{ shlink_image }}"
image: shlinkio/shlink:2.6.2
pull: true
state: started
detach: yes
@@ -146,22 +107,9 @@
restart_policy: unless-stopped
env:
TZ: "{{ timezone }}"
DEFAULT_DOMAIN: "{{ shlink_domain_name }}"
INITIAL_API_KEY: "{{ shlink_initial_api_key }}"
DB_DRIVER: "postgres"
DB_HOST: shlinkdb
DB_NAME: "shlink"
DB_USER: "shlink"
DB_PASSWORD: "{{ shlink_postgres_password }}"
volumes:
- "{{ data_dir }}/shlink/database.sqlite:/etc/shlink/datadatabase.sqlite:rw"
networks_cli_compatible: false
comparisons:
networks: allow_more_present
networks:
- name: shlinknet
tags:
- shlink
SHORT_DOMAIN_HOST: "{{ shlink_domain_name }}"
SHORT_DOMAIN_SCHEMA: https
GEOLITE_LICENSE_KEY: "{{ shlink_geolite_license_key }}"
- name: Setup proxy site {{ shlink_domain_name }}
include_role:
@@ -169,8 +117,6 @@
vars:
site_name: "{{ shlink_domain_name }}"
proxy_port: "{{ shlink_host_port }}"
tags:
- shlink
- name: Check if prosody data dir exists
ansible.builtin.stat:
@@ -234,7 +180,7 @@
- name: Ensure container for prosody XMPP server is running
docker_container:
name: prosody
image: "{{ prosody_image }}"
image: netz39/prosody:0.11
pull: true
state: started
detach: true
@@ -256,7 +202,7 @@
- name: Ensure container for static XMPP website is running
docker_container:
name: jabber-static-website
image: "{{ prosody_web_image }}"
image: joseluisq/static-web-server:2.14
pull: true
state: started
detach: true
@@ -269,8 +215,6 @@
- "127.0.0.1:{{ jabber_host_port }}:80"
volumes:
- "{{ prosody_data_dir }}/var/www:/public:ro"
tags:
- prosody-web
- name: Setup proxy site {{ prosody_domain_name }}
# point to static website for now
@@ -279,21 +223,15 @@
vars:
site_name: "{{ prosody_domain_name }}"
proxy_port: "{{ jabber_host_port }}"
tags:
- prosody-web
- name: Check if hedgedoc data dir exists
ansible.builtin.stat:
path: "{{ data_dir }}/hedgedoc"
register: hedgedoc_dir
tags:
- hedgedoc
- name: Fail if hedgedoc data dir does not exist
ansible.builtin.fail:
msg: "hedgedoc data dir is missing, please restore from the backup!"
when: not hedgedoc_dir.stat.exists
tags:
- hedgedoc
- name: Ensure the hedgedoc directories exist
file:
@@ -305,21 +243,17 @@
mode: "0700"
- path: "{{ data_dir }}/hedgedoc/data/uploads"
mode: "0755"
tags:
- hedgedoc
- name: Setup docker network
docker_network:
name: hedgedocnet
state: present
internal: true
tags:
- hedgedoc
- name: Install HedgeDoc database container
docker_container:
name: hedgedocdb
image: "{{ hedgedoc_db_image }}"
image: "postgres:11.6-alpine"
pull: true
state: started
restart_policy: unless-stopped
@@ -333,8 +267,6 @@
- "{{ data_dir }}/hedgedoc/data/database:/var/lib/postgresql/data"
networks:
- name: hedgedocnet
tags:
- hedgedoc
- name: Ensure container for hedgedoc is running
docker_container:
@@ -350,7 +282,7 @@
TZ: "{{ timezone }}"
NODE_ENV: "production"
CMD_PROTOCOL_USESSL: "true"
CMD_DOMAIN: "{{ hedgedoc_domain_name }}"
CMD_DOMAIN: "pad.n39.eu"
CMD_URL_ADDPORT: "false"
CMD_DB_HOST: "hedgedocdb"
CMD_DB_PORT: "5432"
@@ -360,22 +292,15 @@
CMD_DB_PASSWORD: "{{ hedgedoc_postgres_password }}"
volumes:
- "{{ data_dir }}/hedgedoc/data/uploads:/hedgedoc/public/uploads"
networks_cli_compatible: false
comparisons:
networks: allow_more_present
networks:
- name: hedgedocnet
tags:
- hedgedoc
- name: Setup proxy site "{{ hedgedoc_domain_name }}"
- name: Setup proxy site pad.n39.eu
include_role:
name: setup_http_site_proxy
vars:
site_name: "{{ hedgedoc_domain_name }}"
site_name: pad.n39.eu
proxy_port: "{{ hedgedoc_host_port }}"
tags:
- hedgedoc
- name: Ensure the influxdb directories exist
file:
@@ -423,22 +348,16 @@
ansible.builtin.stat:
path: "{{ data_dir }}/redmine"
register: redmine_dir
tags:
- redmine
- name: Fail if redmine data dir does not exist
ansible.builtin.fail:
msg: "Redmine data dir is missing, please restore from the backup!"
when: not redmine_dir.stat.exists
tags:
- redmine
- name: Setup Redmine docker network
docker_network:
name: redminenet
state: present
internal: true
tags:
- redmine
- name: Setup Redmine MySQL container
docker_container:
@@ -454,11 +373,8 @@
MYSQL_DATABASE: "{{ redmine_database }}"
volumes:
- "{{ data_dir }}/redmine/mysql:/var/lib/mysql"
- "{{ data_dir }}/redmine/mysql-config:/etc/mysql/conf.d"
networks:
- name: redminenet
tags:
- redmine
- name: Setup Redmine container
docker_container:
@@ -478,22 +394,15 @@
- "{{ data_dir }}/redmine/configuration.yml:/usr/src/redmine/config/configuration.yml"
- "{{ data_dir }}/redmine/files:/usr/src/redmine/files"
- "{{ data_dir }}/redmine/themes:/usr/src/redmine/public/themes"
networks_cli_compatible: false
comparisons:
networks: allow_more_present
networks:
- name: redminenet
tags:
- redmine
- name: Setup proxy site "{{ redmine_domain_name }}"
- name: Setup proxy site redmine.n39.eu
include_role:
name: setup_http_site_proxy
vars:
site_name: "{{ redmine_domain_name }}"
site_name: redmine.n39.eu
proxy_port: "{{ redmine_host_port }}"
tags:
- redmine
- name: Ensure the uptime-kuma directories exist
file:
@@ -502,13 +411,11 @@
state: directory
with_items:
- "{{ data_dir }}/uptime-kuma"
tags:
- uptimekuma
- name: Ensure container for uptime-kuma is running
docker_container:
name: uptime-kuma
image: "{{ uptimekuma_image }}"
image: "louislam/uptime-kuma:1"
pull: true
state: started
detach: yes
@@ -519,17 +426,13 @@
TZ: "{{ timezone }}"
volumes:
- "{{ data_dir }}/uptime-kuma:/app/data"
tags:
- uptimekuma
- name: Setup proxy site "{{ uptimekuma_domain_name }}"
- name: Setup proxy site uptime.n39.eu
include_role:
name: setup_http_site_proxy
vars:
site_name: "{{ uptimekuma_domain_name }}"
site_name: uptime.n39.eu
proxy_port: "{{ uptimekuma_host_port }}"
tags:
- uptimekuma
- name: Ensure the grafana directories exist
file:
@@ -545,13 +448,11 @@
mode: "0755"
- path: "{{ data_dir }}/grafana/etc"
mode: "0755"
tags:
- grafana
- name: Ensure container for grafana is running
docker_container:
name: grafana
image: "{{ grafana_image }}"
image: "grafana/grafana:9.4.7"
pull: true
state: started
detach: yes
@@ -566,18 +467,14 @@
GF_SECURITY_ADMIN_PASSWORD: "{{ grafana_admin_password }}"
GF_USERS_ALLOW_SIGN_UP: "false"
GF_INSTALL_PLUGINS: "flant-statusmap-panel,ae3e-plotly-panel"
tags:
- grafana
- name: Setup proxy site "{{ grafana_domain_name }}"
- name: Setup proxy site grafana.n39.eu
include_role:
name: setup_http_site_proxy
vars:
site_name: "{{ grafana_domain_name }}"
proxy_port: "{{ grafana_host_port }}"
proxy_preserve_host: "On"
tags:
- grafana
- name: Ensure the homebox directories exist
file:
@@ -590,13 +487,11 @@
mode: "0755"
- path: "{{ data_dir }}/homebox/data"
mode: "0755"
tags:
- homebox
- name: Ensure container for homebox is running
docker_container:
name: homebox
image: "{{ homebox_image }}"
image: "ghcr.io/hay-kot/homebox"
pull: true
state: started
detach: yes
@@ -610,8 +505,6 @@
HBOX_LOG_LEVEL: "info"
HBOX_LOG_FORMAT: "text"
HBOX_WEB_MAX_UPLOAD_SIZE: "10"
tags:
- homebox
- name: Setup proxy site {{ homebox_domain_name }}
include_role:
@@ -620,8 +513,6 @@
site_name: "{{ homebox_domain_name }}"
proxy_port: "{{ homebox_host_port }}"
proxy_preserve_host: "On"
tags:
- homebox
- name: Setup proxy site spaceapi.n39.eu
template:
@@ -632,15 +523,8 @@
site_name: "spaceapi.n39.eu"
proxy_preserve_host: "On"
notify: Restart apache2
- name: Ensure renovate bot cronjob is present
ansible.builtin.template:
src: templates/pottwal/renovate-cron.j2
dest: /etc/cron.hourly/renovate-bot
mode: "0700"
notify: reload cron
tags:
- renovate
- dev
handlers:
- name: Restart prosody
@@ -654,10 +538,3 @@
name: apache2
state: restarted
- name: reload cron
ansible.builtin.shell:
cmd: service cron reload
# Use the shell call because the task sometimes has problems finding the service state
# service:
# name: cron
# state: restarted

@@ -7,18 +7,19 @@
data_dir: "/srv/data"
mosquitto_image: eclipse-mosquitto:2.0.18
mosquitto_image: eclipse-mosquitto:2.0.14
mosquitto_data: "{{ data_dir }}/mosquitto"
nodered_image: nodered/node-red:3.0.1-1-18
nodered_data: "{{ data_dir }}/nodered"
rabbitmq_image: bitnami/rabbitmq:3.13.6
rabbitmq_image: "bitnami/rabbitmq:3.10.7"
rabbitmq_data: "{{ data_dir }}/rabbitmq"
pwr_meter_pulse_gw_image: netz39/power-meter-pulse-gateway:0.3.0
pwr_meter_pulse_gw_image: "netz39/power-meter-pulse-gateway:0.3.0"
brotherql_host_port: 9004
brotherql_web_image: "pklaus/brother_ql_web:alpine_9e20b6d"
roles:
# role 'docker_setup' applied through group 'docker_host'
@@ -32,7 +33,7 @@
- name: rabbitmq.n39.eu
- name: pwr-meter-pulse-gw-19i.svc.n39.eu
- name: labelprinter.n39.eu
- role: ansible-role-dehydrated_cron
- role: penguineer.dehydrated_cron
tasks:
@@ -45,8 +46,6 @@
- "{{ mosquitto_data }}/config"
- "{{ mosquitto_data }}/data"
- "{{ mosquitto_data }}/log"
tags:
- mosquitto
- name: Make sure mosquitto config is there
template:
@@ -54,8 +53,6 @@
dest: "{{ mosquitto_data }}/config/mosquitto.conf"
mode: 0644
notify: restart mosquitto
tags:
- mosquitto
- name: Ensure mosquitto is running
docker_container:
@@ -75,8 +72,6 @@
restart_policy: unless-stopped
env:
TZ: "{{ timezone }}"
tags:
- mosquitto
- name: Check if nodered data dir exists
ansible.builtin.stat:
@@ -119,14 +114,10 @@
ansible.builtin.stat:
path: "{{ rabbitmq_data }}"
register: rabbitmq_dir
tags:
- rabbitmq
- name: Fail if rabbitmq data dir does not exist
ansible.builtin.fail:
msg: "RabbitMQ data dir is missing, please restore from the backup!"
when: not rabbitmq_dir.stat.exists
tags:
- rabbitmq
- name: Ensure rabbitmq docker container is running
docker_container:
@@ -146,8 +137,6 @@
- "{{ rabbitmq_data }}/bitnami:/bitnami:rw"
- "{{ rabbitmq_data }}/etc_rabbitmq:/etc/rabbitmq:rw"
restart_policy: unless-stopped
tags:
- rabbitmq
- name: Setup proxy site rabbitmq.n39.eu
include_role:
@@ -155,8 +144,7 @@
vars:
site_name: "rabbitmq.n39.eu"
proxy_port: 15672
tags:
- rabbitmq
- name: Ensure Power Meter Pulse Gateway for 19i room is running
docker_container:
@@ -187,18 +175,16 @@
- name: Setup docker container for BrotherQL Web UI printer
docker_container:
name: brotherql-web
image: dersimn/brother_ql_web:2.1.9-alpine
image: "{{ brotherql_web_image }}"
pull: true
restart_policy: unless-stopped
state: started
ports:
- "127.0.0.1:{{ brotherql_host_port }}:8013"
command: "--default-label-size 62 --model QL-720NW tcp://{{ brotherql_printer_host }}"
command: " ./brother_ql_web.py --model QL-720NW tcp://{{ brotherql_printer_ip }}"
detach: yes
env:
TZ: "{{ timezone }}"
tags:
- labelprinter
- name: Setup proxy site labelprinter.n39.eu
include_role:
@@ -206,24 +192,7 @@
vars:
site_name: labelprinter.n39.eu
proxy_port: "{{ brotherql_host_port }}"
tags:
- labelprinter
- name: Setup docker container for Grafana Screenshots
docker_container:
name: grafana-screenshot
image: mrtux/grafana-screenshot:0.1.1
pull: true
restart_policy: unless-stopped
detach: yes
env:
MQTT_BROKER_URL: "{{ kiosk_mqtt_host }}"
MQTT_TOPIC: "{{ kiosk_mqtt_topic }}"
GRAFANA_DASHBOARD_URL: "{{ kiosk_grafana_url }}"
GRAFANA_USERNAME: "{{ kiosk_grafana_user }}"
GRAFANA_PASSWORD: "{{ kiosk_grafana_pass }}"
tags:
- grafana-screenshot
handlers:
- name: restart mosquitto

@@ -14,14 +14,14 @@
dokuwiki_port: 9005
# This container is pinned because there are issues
# with backwards compatibility within the same tag!
dokuwiki_image: bitnami/dokuwiki:20240206.1.0
dokuwiki_image: "bitnami/dokuwiki:20220731@sha256:989ab52cf2d2e0f84166e114ca4ce88f59546b8f6d34958905f8d81c18cbd759"
discord_invite_domain: discord.netz39.de
roles:
# role 'docker_setup' applied through group 'docker_host'
- role: apache
- role: ansible-role-dehydrated_cron
- role: penguineer.dehydrated_cron
tasks:
- name: Setup docker network
@@ -61,7 +61,7 @@
docker_container:
name: phpmyadmin
state: started
image: phpmyadmin:5.2
image: phpmyadmin:5.0
networks_cli_compatible: true
networks:
- name: dockernet
@@ -100,7 +100,7 @@
- name: Setup Docker Registry Container
docker_container:
name: registry
image: registry:2
image: "registry:2"
pull: true
state: started
restart_policy: unless-stopped
@@ -128,22 +128,16 @@
ansible.builtin.stat:
path: "{{ data_dir }}/dokuwiki"
register: dokuwiki_dir
tags:
- dokuwiki
- name: Fail if Dokuwiki data dir does not exist
ansible.builtin.fail:
msg: "Dokuwiki data dir is missing, please restore from the backup!"
when: not dokuwiki_dir.stat.exists
tags:
- dokuwiki
- name: Set correct user for Dokuwiki data
ansible.builtin.file:
path: "{{ data_dir }}/dokuwiki"
owner: "1001" # According to container config
recurse: yes
tags:
- dokuwiki
- name: Setup Dokuwiki Container
docker_container:
@@ -161,8 +155,6 @@
- "{{ data_dir }}/dokuwiki:/bitnami/dokuwiki:rw"
env:
TZ: "{{ timezone }}"
tags:
- dokuwiki
- name: Setup proxy site for Dokuwiki
include_role:
@@ -170,14 +162,12 @@
vars:
site_name: "{{ dokuwiki_domain }}"
proxy_port: "{{ dokuwiki_port }}"
tags:
- dokuwiki
- name: Setup container for secondary FFMD DNS
docker_container:
name: bind9-md-freifunk-net
image: ffmd/bind9-md-freifunk-net:v2022122301
image: "ffmd/bind9-md-freifunk-net:2022111601"
pull: true
state: started
restart_policy: unless-stopped
@@ -186,8 +176,6 @@
- "53:53/udp"
env:
TZ: "{{ timezone }}"
tags:
- ffmd-dns
- name: Setup forwarding for Discord invite
include_role:
@@ -196,3 +184,4 @@
site_name: "{{ discord_invite_domain }}"
# forward_to: "https://discord.com/invite/8FcDvAf"
forward_to: "https://sl.n39.eu/discord"

@@ -13,7 +13,7 @@
- name: Setup the docker container for unifi-controller
docker_container:
name: unifi-controller
image: jacobalberty/unifi:v8.1.113
image: "jacobalberty/unifi:v7.1.65"
state: started
restart_policy: unless-stopped
container_default_behavior: no_defaults
@@ -28,7 +28,6 @@
- "8880:8880/tcp" # HTTP portal
- "3478:3478/udp" # STUN service
- "6789:6789/tcp" # Speed Test (unifi5 only)
- "10001:10001/udp" # Used for device discovery.
volumes:
- "{{ data_dir }}/unifi-controller/data:/unifi/data"
- "{{ data_dir }}/unifi-controller/log:/unifi/log"

@@ -0,0 +1,310 @@
---
ios_interfaces:
GigabitEthernet1/0/1:
description: MGMT
enabled: true
l2:
mode: access
access_vlan: 1
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/2:
description: MGMT
enabled: true
l2:
mode: access
access_vlan: 1
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/3:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/4:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/5:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/6:
description: USV
enabled: true
l2:
mode: access
access_vlan: 1
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/7:
description: beaker
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: all
trunk_native_vlan: 1
state: present
lines: []
state: present
GigabitEthernet1/0/8:
description: beaker
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: all
trunk_native_vlan: 1
state: present
lines: []
state: present
GigabitEthernet1/0/9:
description: beaker ipmi
enabled: true
l2:
mode: access
access_vlan: 1
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/10:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/11:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/12:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/13:
description: patchfeld 1 - Switch an Ausleihliste
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: 1,4,5,7,8,11
trunk_native_vlan: 4
state: present
lines: []
state: present
GigabitEthernet1/0/14:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/15:
description: patchfeld 2 - Raspberry Pi Platon
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/16:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/17:
description: patchfeld 6 - Access Point Hempels Zimmer
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: 1,4,5,7,8,11
trunk_native_vlan: 4
state: present
lines: []
state: present
GigabitEthernet1/0/18:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/19:
description: FräsPC
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/20:
description: patchfeld 4 - Switch am Basteltisch
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: 1,4,5,7,8,11
trunk_native_vlan: 4
state: present
lines: []
state: present
GigabitEthernet1/0/21:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/22:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/23:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/24:
description: lan
enabled: true
l2:
mode: access
access_vlan: 4
trunk_allowed_vlans:
trunk_native_vlan:
state: present
lines: []
state: present
GigabitEthernet1/0/25:
description: uplink von descartes
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: all
trunk_native_vlan: 1
state: present
lines: []
state: present
GigabitEthernet1/0/26:
description: marx
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: all
trunk_native_vlan: 1
state: present
lines: []
state: present
GigabitEthernet1/0/27:
description: unconfigured
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: all
trunk_native_vlan: 1
state: present
lines: []
state: present
GigabitEthernet1/0/28:
description: unconfigured
enabled: true
l2:
mode: trunk
access_vlan:
trunk_allowed_vlans: all
trunk_native_vlan: 1
state: present
lines: []
state: present

@@ -0,0 +1,7 @@
---
ansible_host: 172.23.63.45
ansible_user: admin
ansible_password: "{{ vault_ansible_password }}"
ansible_become_password: "{{ vault_ansible_password }}"
ansible_connection: network_cli
ansible_network_os: ios

@@ -0,0 +1,8 @@
$ANSIBLE_VAULT;1.1;AES256
64656665316637386363313263653532393161656531336262613266363231373537396633386231
3834356536353235356630333766396233626130653237380a396137336262376539373838623762
35396361643263313239386637653330646134616333333961306537306464626461626462626665
3763386531316433390a323533353531363335306663343632326562663334303466663664363530
38613135333336656131646534633839386330323164643338333763373839306566656633633161
62643964343763316264376366636562316336616665663865336633373266353931366336346666
616135333836343436633136636163656138

@@ -0,0 +1,23 @@
---
ios_vlans:
1:
name: MGMT
state: present
4:
name: lan
state: present
5:
name: wan
state: present
7:
name: service
state: present
8:
name: legacy
state: present
9:
name: dmz
state: present
11:
name: ffmd-client
state: present

@@ -2,6 +2,8 @@
server_admin: "admin+hobbes@netz39.de"
mac: "b8:27:eb:f9:43:43"
kiosk_user: pi
kiosk_mqtt_host: "mqtt.n39.eu"
kiosk_mqtt_topic: "Netz39/Things/HackingDashboard/Screenshot"
docker_data_root: "/srv/docker"
kiosk_software_version: v1.0.6
kiosk_software_arch: "armv7"
kiosk_url: "https://grafana.n39.eu/d/xpLj6UD4z/hobbes-space-monitor?orgId=1"
kiosk_grafana_user: "{{ vault_kiosk_grafana_user }}"
kiosk_grafana_pass: "{{ vault_kiosk_grafana_pass }}"

@@ -1,6 +1,9 @@
$ANSIBLE_VAULT;1.1;AES256
32343732363234396136616164383833316634373061376235656566303761646461626138363432
3264633461383739393138396233303839363132343866370a306433306364666438623434383036
63366634313937623736393636393030333961323335323762663538373631353331353162613362
3661653539306365350a333263383537643738373939376563356566313732613766303931633630
6462
32313738636231313036633334333934643839636563646334336533316436653263623461643438
6362343635626266313466643465343962663931623662320a316635613231313930343937363064
33326164333137633039376363643539346463303934333430626431336637326638363233333234
3132333533376134380a383837616331303536623665383735663531343538366332313236386137
62306436663934383363616332316262313762633261396535663533636665633532316366386430
65343830376634633365343337313433643465323662313563366463393664653766623338623635
30653263303761316238396634346337636461643231303561353133643162633934323161663539
66646364373034633334

@@ -1,3 +0,0 @@
---
server_admin: "admin+plumbum@netz39.de"
mac: "32:A3:94:A0:23:77"

@@ -9,57 +9,38 @@ cleanuri_amqp_user: "cleanuri"
cleanuri_amqp_pass: "{{ vault_cleanuri_amqp_pass }}"
cleanuri_amqp_vhost: "/cleanuri"
forgejo_host_port: 9091
forgejo_ssh_port: 2222
forgejo_domain_name: git.n39.eu
forgejo_image: codeberg.org/forgejo/forgejo:1.21.11-0
shlink_host_port: 8083
shlink_domain_name: sl.n39.eu
shlink_image: shlinkio/shlink:4.1.1
shlink_initial_api_key: "{{ vault_shlink_initial_api_key }}"
shlink_postgres_password: "{{ vault_shlink_postgres_password }}"
shlink_geolite_license_key: "{{ vault_shlink_geolite_license_key }}"
hedgedoc_host_port: 8084
hedgedoc_domain_name: pad.n39.eu
hedgedoc_image: quay.io/hedgedoc/hedgedoc:1.9.9
hedgedoc_db_image: postgres:16.3-alpine
hedgedoc_image: quay.io/hedgedoc/hedgedoc:1.9.3
hedgedoc_postgres_password: "{{ vault_hedgedoc_postgres_password }}"
redmine_host_port: 8087
redmine_domain_name: redmine.n39.eu
redmine_image: redmine:5.1.3
redmine_mysql_image: mysql:8.4
redmine_image: redmine:4.2.7
redmine_mysql_image: mysql:5.7
redmine_database: redmine
redmine_database_password: "{{ vault_redmine_database_password }}"
influxdb_host_port: 8088
influxdb_domain_name: influx.n39.eu
influxdb_image: influxdb:2.7-alpine
influxdb_image: influxdb:2.4-alpine
influxdb_init_username: admin
influxdb_init_password: "{{ vault_influxdb_init_password }}"
jabber_host_port: 8086
prosody_domain_name: jabber.n39.eu
prosody_image: netz39/prosody:0.11
prosody_web_image: joseluisq/static-web-server:2.32
forgejo_host_port: 9091
prosody_config_dir: "/etc/prosody"
prosody_data_dir: "{{ data_dir }}/prosody"
prosody_domain_name: jabber.n39.eu
jabber_host_port: 8086
uptimekuma_host_port: 8085
uptimekuma_domain_name: uptime.n39.eu
uptimekuma_image: louislam/uptime-kuma:1.23.13
grafana_host_port: 8089
grafana_domain_name: grafana.n39.eu
grafana_image: grafana/grafana:10.4.5
grafana_admin_password: "{{ vault_grafana_admin_password }}"
homebox_host_port: 8092
homebox_domain_name: inventory.n39.eu
homebox_image: ghcr.io/hay-kot/homebox:v0.10.3
renovate_image: renovate/renovate:38.17.1
renovate_forgejo_pat: "{{ vault_renovate_forgejo_pat }}"
renovate_github_pat: "{{ vault_renovate_github_pat }}"
renovate_git_user: "Renovate Bot <accounts+renovatebot@netz39.de>"

@@ -1,33 +1,20 @@
$ANSIBLE_VAULT;1.1;AES256
61323135656430613464613334653239613865623361363734306139383261653563373365306364
3232353634356664323235393135653762383538353635660a363461393133376566613064366233
32323065633164646535386461373261373461343961383333333063663831353961656265313836
6231356666356266390a333230376264313537376461326331313134313737616137636465336430
38616261333534393464343630616464326331653163616435613863616165633730353263656565
66346536393737353962666438333661663636636339613633653363323438326635643738656430
38313635323066376532396666653633393736633939306566333337336635386430373662666534
64653662333832313964323039353838353638313337306631613564383933663166633164373132
33326537366135613733386436663366613238636133343065376534323561656265613433336637
64613330306530323238663738356133663166303730633735656562636139626232396130656337
34323238326437303730643736646430646239383239613061333033343733663832656262383732
66343236326537633539353230376365666462393264303532346431383838303963613731343263
63656630623934643763636237366630386333646263336261386162656439323232653066393266
36633239323638396639623734666466343164663539316165386632306235363435303139356433
37633731366565393339326235616264616535363461653531613331356239666534653232376235
36623431343136633964656330313833643161353738303564663662363062653631363661633333
31663339643034333336313630356266393062323637333664646335363961386433303662343734
32313338613064373966393163623863633037353564316361656162323234313435646532343231
30356336626435306332316566323932313564626164316165646530656365363330643033376134
32363530306536633531326535373136326364356237376264646130663430343838323834386264
35306561353866346430393837346333396236356465666334656139373764653365396534613034
36393239623930656266336130303236393336373063623738653939393563336130316461393535
32313932396263306439356663373361393539633639343238393631343830306532336162616565
32336264646333613238363065613130633966656164666333303332313536616334623639613630
34323665366131663736623638636263616131393133346464653037366465633332363131316332
65356563373036353432376234626262313266316435656562646365363539386361653966366465
39383536313764663732613462383466616238363765633062333830373038656334363764643663
61346664353064333238313038303333386436653738316630383237366532353765346633383862
65666235666663666638656337303762626563663135613431616439633731383638653466623434
62663164633032666638656464666130623566356636343330386236336266386263323936396330
31613339623034663466613930613062343666633530306136623734393862333365646538326261
63646439343565366463
35323634353263613464653863633861303539636238333464653633653164353632306233626231
3766386232326433383932636136313831346131336335360a383639643334613033336134373566
36343465336365363732363931383031356532336235313537396338316465366537313032616261
6634393966623662390a383335316661613332633165333936396335666637306163363133323363
33613639306537396136643438623937363336376332353634333130313434623433303264393461
38663337396465343937396438333261393463303866306234323138396563653837373334356239
64653231633066656662306530656139316530316263356135363538303061646432353338323237
66663161333133313762366261343434666238376537636433313461343065646565633130333061
33376537316338666662643639623637396366336263656537326363663936616234343235373636
33373039373033333533363366356435633863633434643963633664613238363961363733366437
61353936613065303230616239646334313130636133653461663561303037383663643761376235
33303661663063613635306438613738613064386466656430343130356131663262353239326334
63323630333466356263646162336437646133616565353430313737313332363330663236383830
33366138303665386561393136616238346335633163313330386434323239623736333562363862
66636165373264353132626232633537613536303362366535653438303261323735666331363439
61613939373333616364303134353437333965386239623933393932373434666234373736316166
63373935356162326230653437643030313262373965353831396361646136663938336334646633
65313166613131396665393363633166663137363564393063363330366364373936643831373030
333465303435636163616236306264646666

@@ -3,12 +3,5 @@ server_admin: "admin+radon@netz39.de"
pwr_meter_amqp_user: "pwr-meter"
pwr_meter_amqp_pass: "{{ vault_pwr_meter_amqp_pass }}"
pwr_meter_api_token: "{{ vault_pwr_meter_api_token }}"
brotherql_printer_host: "brotherql-720nw.n39.eu"
# URL for the Grafana kiosk in our tinkering area
kiosk_grafana_url: "https://grafana.n39.eu/d/xpLj6UD4z/hobbes-space-monitor?orgId=1"
kiosk_grafana_user: "{{ vault_kiosk_grafana_user }}"
kiosk_grafana_pass: "{{ vault_kiosk_grafana_pass }}"
kiosk_mqtt_host: "mqtt.n39.eu"
kiosk_mqtt_topic: "Netz39/Things/HackingDashboard/Screenshot"
# See https://gitea.n39.eu/Netz39_Admin/config.descartes/src/branch/live/dns_dhcp.txt
brotherql_printer_ip: "172.23.48.53"

@@ -1,14 +1,10 @@
$ANSIBLE_VAULT;1.1;AES256
64396666646432653766656333333139613631333035393137363036633330336134383932663631
6533326532333366633136346232306637306266343264380a616164643037393036383834313238
32343437343466343262336137633436343935663465616364303961656565643134346563373461
3337303534646563660a366562323065666630626331346266366139653533386238663361373930
30623733336361353838373730316537623066326166366634643466386332396333613531613564
38373363303466346639343563356339303037323261383034316439326237636565633462336462
35313561356465393337616162323866393365613537333461656234313464653165333963343331
32343634383335663764353831303864373637393833366333376635343665396366616363663033
37323031316535636131333738633237383665323638613233666432316261376239636234626638
33313230643563316662323937656338613362646466323335376363626163383233623831643565
31393438363334653863363536373632333930616636323237336236353863616638616165303931
63333639393665633537646665613933323632376162363139656632323166393264313333653163
64333935356138336562366634636364346461356539363162616438613232306533
61393134306361663861356132333135633566626136383536363763646134386338363362343830
6339626232333037613437386634396138323438643037390a366338353862653439323961626532
37393438326261363563323233333364323536373735383834383134653935383436356137396166
3531326465363438310a663232306138333866373637336234326166666261333332386632316163
61616339656436666233343339383835643934366661366333386363386639306631643366623333
30666430623435633961613932323239343239623532316662323937346634656136396539303036
63363365363861646333386364373263303037663266323832663761633633663136616338323362
36326561623063646666373034333335373135343736633066393937653234313932363138643065
38646231333564303861633231353535623436326135303463613738346231633962

@@ -8,7 +8,6 @@ all:
krypton.n39.eu:
oganesson.n39.eu:
platon.n39.eu:
plumbum.n39.eu:
pottwal.n39.eu:
radon.n39.eu:
unicorn.n39.eu:
@@ -18,6 +17,7 @@ all:
k3s-w1.n39.eu:
k3s-w2.n39.eu:
k3s-w3.n39.eu:
cisco-2960-1.n39.eu:
children:
docker_host:
@@ -32,7 +32,6 @@ all:
holmium.n39.eu:
krypton.n39.eu:
oganesson.n39.eu:
plumbum.n39.eu:
pottwal.n39.eu:
radon.n39.eu:
unicorn.n39.eu:
@@ -50,7 +49,6 @@ all:
krypton.n39.eu:
oganesson.n39.eu:
platon.n39.eu:
plumbum.n39.eu:
pottwal.n39.eu:
radon.n39.eu:
wittgenstein.n39.eu:
@@ -72,3 +70,6 @@ all:
k3s-w1.n39.eu:
k3s-w2.n39.eu:
k3s-w3.n39.eu:
network:
hosts:
cisco-2960-1.n39.eu:

@@ -39,6 +39,3 @@
- name: Hobbes specific setup
import_playbook: host-hobbes.yml
- name: Plumbum specific setup
import_playbook: host-plumbum.yml

@@ -1,34 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"regexManagers": [
{
"fileMatch": [
"^host-.*.yml$",
"^host_vars/.*/vars.yml$"
],
"datasourceTemplate": "docker",
"versioningTemplate": "docker",
"matchStrings": [
"image: (?<depName>.*?):(?<currentValue>.*?)(@(?<currentDigest>sha256:.*?))?\\s"
]
},
{
"fileMatch": [
"^roles/docker_setup/defaults/main.yml$"
],
"datasourceTemplate": "github-release",
"versioningTemplate": "semver",
"depNameTemplate": "docker-compose",
"matchStrings": [
"docker_compose_version: (?<currentValue>.*?)\\s"
]
}
],
"packageRules": [
{
"matchPackageNames": ["renovate/renovate"],
"schedule": [ "on friday" ]
}
]
}

@@ -1,13 +1,13 @@
---
roles:
- src: hifis.unattended_upgrades
version: v3.2.1
version: v1.12.2
- src: git+https://github.com/adriagalin/ansible.timezone.git
version: 4.0.0
version: 3.0.0
- src: git+https://github.com/24367dfa/ansible-role-dehydrated.git
version: 2.0.0
- src: https://github.com/penguineer/ansible-role-dehydrated_cron.git
version: v1.1.0
version: 1.0.3
- src: penguineer.dehydrated_cron
version: v1.0.0
- src: git+https://github.com/maz3max/ble-keykeeper-role.git
version: v1.1.0
- src: lespocky.telegraf_docker_in_docker
@@ -15,4 +15,4 @@ roles:
collections:
- name: community.grafana
version: 1.9.1
version: 1.5.3

@@ -19,7 +19,7 @@ cleanuri_amqp_canonizer: "canonizer"
cleanuri_amqp_retrieval: "extractor"
# Docker images
cleanuri_image_webui: mrtux/cleanuri-webui:0.2.1
cleanuri_image_apigateway: mrtux/cleanuri-apigateway:0.3.1
cleanuri_image_canonizer: mrtux/cleanuri-canonizer:0.5.0
cleanuri_image_extractor: mrtux/cleanuri-extractor:0.5.0
cleanuri_image_webui: mrtux/cleanuri-webui:0.2.0
cleanuri_image_apigateway: mrtux/cleanuri-apigateway:0.3.0
cleanuri_image_canonizer: mrtux/cleanuri-canonizer:0.3.0
cleanuri_image_extractor: mrtux/cleanuri-extractor:0.3.0

@@ -1,8 +0,0 @@
# Defaults for nfs-host
---
nfs_host_exports: []
# - directory: "/srv/nfs"
# hosts: "k3s-w[0-9]+.n39.eu"
# options: rw,sync,no_subtree_check
nfs_host_storage_device: "/dev/sdb"

@@ -1,3 +0,0 @@
---
- name: reload nfs
command: 'exportfs -ra'

@@ -1,41 +0,0 @@
---
- name: Install required packages
ansible.builtin.apt:
state: present
name:
- nfs-kernel-server
- nfs-common
- parted
- name: Create a new ext4 primary partition
community.general.parted:
device: "{{ nfs_host_storage_device }}"
number: 1
state: present
fs_type: ext4
- name: ensure nfs mountpoints exist
ansible.builtin.file:
path: "{{ item.directory }}"
state: directory
owner: nobody
group: nogroup
mode: '0777'
with_items: "{{ nfs_host_exports }}"
- name: Mount up device by label
ansible.posix.mount:
path: "{{ nfs_host_exports[0].directory }}"
src: /dev/sdb1
fstype: ext4
state: present
- name: template /etc/exports
ansible.builtin.template:
src: templates/exports.j2
dest: "/etc/exports"
notify: reload nfs
- name: Ensure nfs is running.
ansible.builtin.service: "name=nfs-kernel-server state=started enabled=yes"
when: nfs_host_exports|length

@@ -1,3 +0,0 @@
{% for export in nfs_host_exports %}
{{ export.directory }} {{ export.hosts }}({{ export.options }})
{% endfor %}

@@ -0,0 +1,36 @@
[Unit]
Description=Grafana Kiosk
Documentation=https://github.com/grafana/grafana-kiosk
Documentation=https://grafana.com/blog/2019/05/02/grafana-tutorial-how-to-create-kiosks-to-display-dashboards-on-a-tv
After=network.target
Wants=graphical.target
After=graphical.target
[Service]
User={{ kiosk_user }}
Environment="DISPLAY=:0"
Environment="XAUTHORITY=/home/{{ kiosk_user }}/.Xauthority"
# These should work according to the docs, but seem to appear nowhere in the code
#Environment="KIOSK_MODE=full"
#Environment="KIOSK_AUTOFIT=false"
#Environment="KIOSK_LXDE_ENABLED=true"
#Environment="KIOSK_LXDE_HOME=/home/{{ kiosk_user }}"
#Environment="KIOSK_URL={{ kiosk_url }}"
#Environment="KIOSK_LOGIN_METHOD=local"
#Environment="KIOSK_LOGIN_USER={{ kiosk_grafana_user }}"
#Environment="KIOSK_LOGIN_PASSWORD={{ kiosk_grafana_pass }}"
# Disable screensaver etc.
ExecStartPre=xset s off
ExecStartPre=xset -dpms
ExecStartPre=xset s noblank
ExecStart=/home/{{ kiosk_user }}/bin/grafana-kiosk -kiosk-mode=full -autofit=false -lxde-home=/home/{{ kiosk_user }} -URL="{{ kiosk_url }}" -login-method=local -username={{ kiosk_grafana_user }} --password={{ kiosk_grafana_pass }}
Restart=on-failure
RestartSec=30s
[Install]
WantedBy=graphical.target

@@ -1,47 +0,0 @@
#!/bin/bash
# Check if the script is run by root
if [ "$EUID" -ne 0 ]
then echo "Please run as root"
exit
fi
# MQTT broker details
BROKER="{{ kiosk_mqtt_host }}"
TOPIC="{{ kiosk_mqtt_topic }}"
# Variable to store the PID of the fbi process
fbi_pid=0
# Function to be executed on SIGTERM
on_sigterm() {
echo "SIGTERM received, exiting..."
# Kill the fbi process
# As the process forks itself, we do not get a reliable PID and killall is needed
killall fbi
# Remove the temporary file
rm -f /tmp/grafana.png
exit 0
}
# Trap SIGTERM and call on_sigterm() when it is received
trap 'on_sigterm' SIGTERM
while true
do
# Subscribe to the topic and save received data to a file
mosquitto_sub -h $BROKER -t $TOPIC -C 1 > /tmp/grafana.png
# Kill the previous fbi process
# As the process forks itself, we do not get a reliable PID and killall is needed
killall fbi
# Display the image
fbi -T 1 -noverbose -a /tmp/grafana.png &
# Wait to avoid a race condition between
# fbi starting and mosquitto truncating the file
sleep 1
done

@@ -1,8 +0,0 @@
#!/bin/bash
docker run --rm \
-e RENOVATE_TOKEN={{ renovate_forgejo_pat }} \
-e RENOVATE_ENDPOINT=https://{{ forgejo_domain_name }}/api/v1 \
-e RENOVATE_PLATFORM=gitea \
-e RENOVATE_GIT_AUTHOR={{ renovate_git_user | quote }} \
-e GITHUB_COM_TOKEN={{ renovate_github_pat }} \
{{ renovate_image }} --autodiscover