Compare commits

7 commits, master...feat/cisco

SHA1:
eebdf0b360
7846e91e6b
a454932328
3fff171180
77c2d6aa04
d211caae89
a9567bd31d

22 changed files with 529 additions and 241 deletions
.gitignore (vendored, 2 changes)

@@ -1 +1 @@
-vault-pass
+vault-pass
@@ -49,7 +49,7 @@ To set up a new HTTPS vhost, the following steps need to be taken:

 1. Select a domain (for internal services we use sub-domains of `.n39.eu`).
 2. Create an external CNAME from this domain to `dyndns.n39.eu`.
-3. Create an internal DNS entry in the [Descartes DNS config](https://git.n39.eu/Netz39_Admin/config.descartes/src/branch/prepare/dns_dhcp.txt). This is usually an alias on an existing server.
+3. Create an internal DNS entry in the [Descartes DNS config](https://gitea.n39.eu/Netz39_Admin/config.descartes/src/branch/prepare/dns_dhcp.txt). This is usually an alias on an existing server.
 4. Add the entry to the [holmium playbook](holmium.yml).
 5. Set up Dehydrated and vhost on the target host, e.g. using `setup_http_site_proxy`.

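Steps 4 and 5 above translate into roughly the following snippets. This is a minimal sketch based on the patterns used elsewhere in this repository; `example.n39.eu` and the port number are placeholders, not real entries:

  # holmium.yml (sketch): route the new domain to the serving host
  - server: pottwal
    hosts:
      - name: example.n39.eu        # hypothetical new vhost

  # playbook of the target host (sketch): obtain the certificate and create the proxy vhost
  - name: Setup proxy site example.n39.eu
    include_role:
      name: setup_http_site_proxy
    vars:
      site_name: example.n39.eu
      proxy_port: 9999              # hypothetical host port of the backing container
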
device-cisco-2960-1.yml (new file, 97 lines)

@@ -0,0 +1,97 @@
---
- hosts: cisco-2960-1.n39.eu
  become: true

  tasks:
    - name: configure login banner
      cisco.ios.ios_banner:
        banner: login
        text: "Documentation here: https://wiki.netz39.de/internal:inventory:network:2960s-24td-l"
        state: present

    - name: configure vlans
      cisco.ios.ios_vlans:
        config:
          - name: lan
            vlan_id: 4
            state: active
            shutdown: disabled
          - name: wan
            vlan_id: 5
            state: active
            shutdown: disabled
          - name: service
            vlan_id: 7
            state: active
            shutdown: disabled
          - name: legacy
            vlan_id: 8
            state: active
            shutdown: disabled
          - name: dmz
            vlan_id: 9
            state: active
            shutdown: disabled
          - name: ffmd-client
            vlan_id: 11
            state: active
            shutdown: disabled
        state: merged

    - name: configure port assignment
      cisco.ios.ios_l2_interfaces:
        config:
          # USV
          - name: Gi1/0/6
            mode: access
            access:
              vlan: 1
          # beaker ipmi
          - name: Gi1/0/9
            mode: access
            access:
              vlan: 1
          # Patchfeld 1, Switch ausleihliste
          - name: Gi1/0/13
            mode: trunk
            trunk:
              allowed_vlans: 1,4,5,7,8,11
              native_vlan: 4
          # patchfeld 2 - Raspberry Pi Platon
          - name: Gi1/0/15
            mode: access
            access:
              vlan: 4
          # patchfeld 6 - Access Point Hempels Zimmer
          - name: Gi1/0/17
            mode: access
            access:
              vlan: 4
          # FräsPC
          - name: Gi1/0/19
            mode: access
            access:
              vlan: 4
          # patchfeld 4 - Switch am Basteltisch
          - name: Gi1/0/20
            mode: trunk
            trunk:
              allowed_vlans: 1,4,5,7,8,11
              native_vlan: 4
          # uplink descartes
          - name: Gi1/0/25
            mode: trunk
            trunk:
              allowed_vlans: 1-11
              native_vlan: 1
          # server marx
          - name: Gi1/0/26
            mode: trunk
            trunk:
              allowed_vlans: 1-11
              native_vlan: 1
        state: merged

    - name: Save running to startup when modified
      cisco.ios.ios_config:
        save_when: modified

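The playbook above relies on the cisco.ios collection (ios_banner, ios_vlans, ios_l2_interfaces, ios_config). If that collection is not already available on the control node, a requirements.yml entry along these lines would be needed; this is a sketch and not part of this change set:

  collections:
    - name: cisco.ios   # provides the ios_* modules used by device-cisco-2960-1.yml
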
@@ -15,7 +15,7 @@
       - name: entities-validation.svc.n39.eu
   - server: pottwal
     hosts:
-      - name: git.n39.eu
+      - name: gitea.n39.eu
       - name: redmine.n39.eu
       - name: uritools.n39.eu
       - name: uritools-api.n39.eu
@@ -37,5 +37,5 @@
         local: true
       - name: pwr-meter-pulse-gw-19i.svc.n39.eu
         local: true
-      - name: labelprinter.n39.eu
+      - name: brotherql-web.n39.eu
         local: true

@@ -9,6 +9,7 @@

 docker_ip_ranges: ["172.16.0.0/12", "192.168.0.0/16"]

+openldap_image_version: 1.5.0
 openldap_data: "{{ data_dir }}/openldap"
 openldap_domain: "ldap.n39.eu"
 ldap_domain: "netz39.de"
@@ -51,7 +52,7 @@
     - name: Ensure container for openLDAP is running.
       docker_container:
         name: openLDAP
-        image: osixia/openldap:1.5.0
+        image: "osixia/openldap:{{ openldap_image_version }}"
         detach: yes
         state: started
         restart_policy: unless-stopped

host-pottwal.yml (137 changes)

@@ -10,18 +10,18 @@
   vars:
     dehydrated_contact_email: "{{ server_admin }}"
     dehydrated_domains:
-      - name: "{{ forgejo_domain_name }}"
+      - name: gitea.n39.eu
       - name: uritools.n39.eu
       - name: uritools-api.n39.eu
       - name: "{{ shlink_domain_name }}"
-      - name: "{{ hedgedoc_domain_name }}"
+      - name: pad.n39.eu
       - name: "{{ prosody_domain_name }}"
         alternate_names:
           - conference.jabber.n39.eu
         deploy_cert_hook: "docker exec prosody prosodyctl --root cert import ${DOMAIN} /var/lib/dehydrated/certs"
-      - name: "{{ redmine_domain_name }}"
+      - name: redmine.n39.eu
       - name: "{{ influxdb_domain_name }}"
-      - name: "{{ uptimekuma_domain_name }}"
+      - name: uptime.n39.eu
       - name: "{{ grafana_domain_name }}"
       - name: "{{ homebox_domain_name }}"
       - name: spaceapi.n39.eu
@@ -52,39 +52,39 @@
     - name: Setup the docker container for gitea
      docker_container:
        name: forgejo
-        image: "{{ forgejo_image }}"
+        image: "codeberg.org/forgejo/forgejo:1.19"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: yes
        ports:
          - 127.0.0.1:{{ forgejo_host_port }}:3000
-          - "{{ forgejo_ssh_port }}:2222"
+          - 2222:2222
        env:
          TZ: "{{ timezone }}"
          APP_NAME: "Netz39 Git"
          RUN_MODE: "prod"
-          SSH_DOMAIN: "{{ forgejo_domain_name }}"
+          SSH_DOMAIN: "gitea.n39.eu"
          SSH_PORT: "2222"
          SSH_START_SERVER: "false"
-          ROOT_URL: "https://{{ forgejo_domain_name }}"
+          ROOT_URL: "https://gitea.n39.eu"
          DISABLE_REGISTRATION: "true"
          USER_UID: "1000"
          USER_GID: "1000"
        volumes:
          - "{{ data_dir }}/forgejo:/data:rw"

-    - name: Setup proxy site "{{ forgejo_domain_name }}"
+    - name: Setup proxy site gitea.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
-        site_name: "{{ forgejo_domain_name }}"
+        site_name: "gitea.n39.eu"
        proxy_port: "{{ forgejo_host_port }}"

     - name: Ensure apt-cacher container is running
      docker_container:
        name: apt_cacher_ng
-        image: mrtux/apt-cacher-ng:latest
+        image: "mrtux/apt-cacher-ng"
        pull: true
        state: started
        restart_policy: unless-stopped
@@ -94,18 +94,11 @@
        env:
          TZ: "{{ timezone }}"

-    - name: Ensure shlink data dir exists
-      ansible.builtin.file:
-        path: "{{ data_dir }}/shlink"
-        state: directory
-        mode: 0755
-      tags:
-        - shlink

     - name: Ensure container for shlink is running
      docker_container:
        name: shlink
-        image: "{{ shlink_image }}"
+        image: shlinkio/shlink:2.6.2
        pull: true
        state: started
        detach: yes
@@ -114,13 +107,9 @@
        restart_policy: unless-stopped
        env:
          TZ: "{{ timezone }}"
-          DEFAULT_DOMAIN: "{{ shlink_domain_name }}"
+          SHORT_DOMAIN_HOST: "{{ shlink_domain_name }}"
+          SHORT_DOMAIN_SCHEMA: https
          GEOLITE_LICENSE_KEY: "{{ shlink_geolite_license_key }}"
-          INITIAL_API_KEY: "{{ shlink_initial_api_key }}"
-        volumes:
-          - "{{ data_dir }}/shlink/database.sqlite:/etc/shlink/datadatabase.sqlite:rw"
-      tags:
-        - shlink

     - name: Setup proxy site {{ shlink_domain_name }}
      include_role:
@@ -128,8 +117,6 @@
      vars:
        site_name: "{{ shlink_domain_name }}"
        proxy_port: "{{ shlink_host_port }}"
-      tags:
-        - shlink

     - name: Check if prosody data dir exists
      ansible.builtin.stat:
@@ -193,7 +180,7 @@
     - name: Ensure container for prosody XMPP server is running
      docker_container:
        name: prosody
-        image: "{{ prosody_image }}"
+        image: netz39/prosody:0.11
        pull: true
        state: started
        detach: true
@@ -215,7 +202,7 @@
     - name: Ensure container for static XMPP website is running
      docker_container:
        name: jabber-static-website
-        image: "{{ prosody_web_image }}"
+        image: joseluisq/static-web-server:2.14
        pull: true
        state: started
        detach: true
@@ -241,14 +228,10 @@
      ansible.builtin.stat:
        path: "{{ data_dir }}/hedgedoc"
      register: hedgedoc_dir
-      tags:
-        - hedgedoc
     - name: Fail if hedgedoc data dir does not exist
      ansible.builtin.fail:
        msg: "hedgedoc data dir is missing, please restore from the backup!"
      when: not hedgedoc_dir.stat.exists
-      tags:
-        - hedgedoc

     - name: Ensure the hedgedoc directories exist
      file:
@@ -260,21 +243,17 @@
          mode: "0700"
        - path: "{{ data_dir }}/hedgedoc/data/uploads"
          mode: "0755"
-      tags:
-        - hedgedoc

     - name: Setup docker network
      docker_network:
        name: hedgedocnet
        state: present
        internal: true
-      tags:
-        - hedgedoc

     - name: Install HedgeDoc database container
      docker_container:
        name: hedgedocdb
-        image: "{{ hedgedoc_db_image }}"
+        image: "postgres:11.6-alpine"
        pull: true
        state: started
        restart_policy: unless-stopped
@@ -288,8 +267,6 @@
          - "{{ data_dir }}/hedgedoc/data/database:/var/lib/postgresql/data"
        networks:
          - name: hedgedocnet
-      tags:
-        - hedgedoc

     - name: Ensure container for hedgedoc is running
      docker_container:
@@ -305,7 +282,7 @@
          TZ: "{{ timezone }}"
          NODE_ENV: "production"
          CMD_PROTOCOL_USESSL: "true"
-          CMD_DOMAIN: "{{ hedgedoc_domain_name }}"
+          CMD_DOMAIN: "pad.n39.eu"
          CMD_URL_ADDPORT: "false"
          CMD_DB_HOST: "hedgedocdb"
          CMD_DB_PORT: "5432"
@@ -315,22 +292,15 @@
          CMD_DB_PASSWORD: "{{ hedgedoc_postgres_password }}"
        volumes:
          - "{{ data_dir }}/hedgedoc/data/uploads:/hedgedoc/public/uploads"
-        networks_cli_compatible: false
-        comparisons:
-          networks: allow_more_present
        networks:
          - name: hedgedocnet
-      tags:
-        - hedgedoc

-    - name: Setup proxy site "{{ hedgedoc_domain_name }}"
+    - name: Setup proxy site pad.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
-        site_name: "{{ hedgedoc_domain_name }}"
+        site_name: pad.n39.eu
        proxy_port: "{{ hedgedoc_host_port }}"
-      tags:
-        - hedgedoc

     - name: Ensure the influxdb directories exist
      file:
@@ -378,22 +348,16 @@
      ansible.builtin.stat:
        path: "{{ data_dir }}/redmine"
      register: redmine_dir
-      tags:
-        - redmine
     - name: Fail if redmine data dir does not exist
      ansible.builtin.fail:
        msg: "Redmine data dir is missing, please restore from the backup!"
      when: not redmine_dir.stat.exists
-      tags:
-        - redmine

     - name: Setup Redmine docker network
      docker_network:
        name: redminenet
        state: present
        internal: true
-      tags:
-        - redmine

     - name: Setup Redmine MySQL container
      docker_container:
@@ -411,8 +375,6 @@
          - "{{ data_dir }}/redmine/mysql:/var/lib/mysql"
        networks:
          - name: redminenet
-      tags:
-        - redmine

     - name: Setup Redmine container
      docker_container:
@@ -432,22 +394,15 @@
          - "{{ data_dir }}/redmine/configuration.yml:/usr/src/redmine/config/configuration.yml"
          - "{{ data_dir }}/redmine/files:/usr/src/redmine/files"
          - "{{ data_dir }}/redmine/themes:/usr/src/redmine/public/themes"
-        networks_cli_compatible: false
-        comparisons:
-          networks: allow_more_present
        networks:
          - name: redminenet
-      tags:
-        - redmine

-    - name: Setup proxy site "{{ redmine_domain_name }}"
+    - name: Setup proxy site redmine.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
-        site_name: "{{ redmine_domain_name }}"
+        site_name: redmine.n39.eu
        proxy_port: "{{ redmine_host_port }}"
-      tags:
-        - redmine

     - name: Ensure the uptime-kuma directories exist
      file:
@@ -456,13 +411,11 @@
        state: directory
      with_items:
        - "{{ data_dir }}/uptime-kuma"
-      tags:
-        - uptimekuma

     - name: Ensure container for uptime-kuma is running
      docker_container:
        name: uptime-kuma
-        image: "{{ uptimekuma_image }}"
+        image: "louislam/uptime-kuma:1"
        pull: true
        state: started
        detach: yes
@@ -473,17 +426,13 @@
          TZ: "{{ timezone }}"
        volumes:
          - "{{ data_dir }}/uptime-kuma:/app/data"
-      tags:
-        - uptimekuma

-    - name: Setup proxy site "{{ uptimekuma_domain_name }}"
+    - name: Setup proxy site uptime.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
-        site_name: "{{ uptimekuma_domain_name }}"
+        site_name: uptime.n39.eu
        proxy_port: "{{ uptimekuma_host_port }}"
-      tags:
-        - uptimekuma

     - name: Ensure the grafana directories exist
      file:
@@ -499,13 +448,11 @@
          mode: "0755"
        - path: "{{ data_dir }}/grafana/etc"
          mode: "0755"
-      tags:
-        - grafana

     - name: Ensure container for grafana is running
      docker_container:
        name: grafana
-        image: "{{ grafana_image }}"
+        image: "grafana/grafana:9.4.7"
        pull: true
        state: started
        detach: yes
@@ -520,18 +467,14 @@
          GF_SECURITY_ADMIN_PASSWORD: "{{ grafana_admin_password }}"
          GF_USERS_ALLOW_SIGN_UP: "false"
          GF_INSTALL_PLUGINS: "flant-statusmap-panel,ae3e-plotly-panel"
-      tags:
-        - grafana

-    - name: Setup proxy site "{{ grafana_domain_name }}"
+    - name: Setup proxy site grafana.n39.eu
      include_role:
        name: setup_http_site_proxy
      vars:
        site_name: "{{ grafana_domain_name }}"
        proxy_port: "{{ grafana_host_port }}"
        proxy_preserve_host: "On"
-      tags:
-        - grafana

     - name: Ensure the homebox directories exist
      file:
@@ -544,13 +487,11 @@
          mode: "0755"
        - path: "{{ data_dir }}/homebox/data"
          mode: "0755"
-      tags:
-        - homebox

     - name: Ensure container for homebox is running
      docker_container:
        name: homebox
-        image: "{{ homebox_image }}"
+        image: "ghcr.io/hay-kot/homebox"
        pull: true
        state: started
        detach: yes
@@ -564,8 +505,6 @@
          HBOX_LOG_LEVEL: "info"
          HBOX_LOG_FORMAT: "text"
          HBOX_WEB_MAX_UPLOAD_SIZE: "10"
-      tags:
-        - homebox

     - name: Setup proxy site {{ homebox_domain_name }}
      include_role:
@@ -574,8 +513,6 @@
        site_name: "{{ homebox_domain_name }}"
        proxy_port: "{{ homebox_host_port }}"
        proxy_preserve_host: "On"
-      tags:
-        - homebox

     - name: Setup proxy site spaceapi.n39.eu
      template:
@@ -586,15 +523,8 @@
        site_name: "spaceapi.n39.eu"
        proxy_preserve_host: "On"
      notify: Restart apache2
+      tags:
+        - dev

-    - name: Ensure renovate bot cronjob is present
-      ansible.builtin.template:
-        src: templates/pottwal/renovate-cron.j2
-        dest: /etc/cron.hourly/renovate-bot
-        mode: "0700"
-      notify: reload cron
-      tags:
-        - renovate

   handlers:
     - name: Restart prosody
@@ -608,10 +538,3 @@
        name: apache2
        state: restarted

-    - name: reload cron
-      ansible.builtin.shell:
-        cmd: service cron reload
-      # Use the shell call because the task sometimes has problems finding the service state
-      # service:
-      #   name: cron
-      #   state: restarted

@@ -7,19 +7,19 @@

 data_dir: "/srv/data"

-mosquitto_image: eclipse-mosquitto:2.0.18
+mosquitto_image: eclipse-mosquitto:2.0.14
 mosquitto_data: "{{ data_dir }}/mosquitto"

 nodered_image: nodered/node-red:3.0.1-1-18
 nodered_data: "{{ data_dir }}/nodered"

-rabbitmq_image: bitnami/rabbitmq:3.12.7
+rabbitmq_image: "bitnami/rabbitmq:3.10.7"
 rabbitmq_data: "{{ data_dir }}/rabbitmq"

-pwr_meter_pulse_gw_image: netz39/power-meter-pulse-gateway:0.3.0
+pwr_meter_pulse_gw_image: "netz39/power-meter-pulse-gateway:0.3.0"

 brotherql_host_port: 9004
-brotherql_web_image: dersimn/brother_ql_web:2.1-alpine
+brotherql_web_image: "pklaus/brother_ql_web:alpine_9e20b6d"

 roles:
   # role 'docker_setup' applied through group 'docker_host'
@@ -46,8 +46,6 @@
        - "{{ mosquitto_data }}/config"
        - "{{ mosquitto_data }}/data"
        - "{{ mosquitto_data }}/log"
-      tags:
-        - mosquitto

     - name: Make sure mosquitto config is there
      template:
@@ -55,8 +53,6 @@
        dest: "{{ mosquitto_data }}/config/mosquitto.conf"
        mode: 0644
      notify: restart mosquitto
-      tags:
-        - mosquitto

     - name: Ensure mosquitto is running
      docker_container:
@@ -76,8 +72,6 @@
        restart_policy: unless-stopped
        env:
          TZ: "{{ timezone }}"
-      tags:
-        - mosquitto

     - name: Check if nodered data dir exists
      ansible.builtin.stat:
@@ -187,7 +181,7 @@
        state: started
        ports:
          - "127.0.0.1:{{ brotherql_host_port }}:8013"
-        command: "--default-label-size 62 --model QL-720NW tcp://{{ brotherql_printer_ip }}"
+        command: " ./brother_ql_web.py --model QL-720NW tcp://{{ brotherql_printer_ip }}"
        detach: yes
        env:
          TZ: "{{ timezone }}"

host-tau.yml (21 changes)

@@ -14,7 +14,7 @@
     dokuwiki_port: 9005
     # This container is pinned, because there are issues
     # with backwards compatibility within the same tag!
-    dokuwiki_image: bitnami/dokuwiki:20230404@sha256:a13023559160cf6bd1f2b77b753b5335643d65c0347cad4898076efa9de78d14
+    dokuwiki_image: "bitnami/dokuwiki:20220731@sha256:989ab52cf2d2e0f84166e114ca4ce88f59546b8f6d34958905f8d81c18cbd759"

     discord_invite_domain: discord.netz39.de

@@ -61,7 +61,7 @@
      docker_container:
        name: phpmyadmin
        state: started
-        image: phpmyadmin:5.2
+        image: phpmyadmin:5.0
        networks_cli_compatible: true
        networks:
          - name: dockernet
@@ -100,7 +100,7 @@
     - name: Setup Docker Registry Container
      docker_container:
        name: registry
-        image: registry:2
+        image: "registry:2"
        pull: true
        state: started
        restart_policy: unless-stopped
@@ -128,22 +128,16 @@
      ansible.builtin.stat:
        path: "{{ data_dir }}/dokuwiki"
      register: dokuwiki_dir
-      tags:
-        - dokuwiki
     - name: Fail if Dokuwiki data dir does not exist
      ansible.builtin.fail:
        msg: "Dokuwiki data dir is missing, please restore from the backup!"
      when: not dokuwiki_dir.stat.exists
-      tags:
-        - dokuwiki

     - name: Set correct user for Dokuwiki data
      ansible.builtin.file:
        path: "{{ data_dir }}/dokuwiki"
        owner: "1001"  # According to container config
        recurse: yes
-      tags:
-        - dokuwiki

     - name: Setup Dokuwiki Container
      docker_container:
@@ -161,8 +155,6 @@
        - "{{ data_dir }}/dokuwiki:/bitnami/dokuwiki:rw"
        env:
          TZ: "{{ timezone }}"
-      tags:
-        - dokuwiki

     - name: Setup proxy site for Dokuwiki
      include_role:
@@ -170,14 +162,12 @@
      vars:
        site_name: "{{ dokuwiki_domain }}"
        proxy_port: "{{ dokuwiki_port }}"
-      tags:
-        - dokuwiki


     - name: Setup container for secondary FFMD DNS
      docker_container:
        name: bind9-md-freifunk-net
-        image: ffmd/bind9-md-freifunk-net:v2022122301
+        image: "ffmd/bind9-md-freifunk-net:2022111601"
        pull: true
        state: started
        restart_policy: unless-stopped
@@ -186,8 +176,6 @@
        - "53:53/udp"
        env:
          TZ: "{{ timezone }}"
-      tags:
-        - ffmd-dns

     - name: Setup forwarding for Discord invite
      include_role:
@@ -196,3 +184,4 @@
        site_name: "{{ discord_invite_domain }}"
        # forward_to: "https://discord.com/invite/8FcDvAf"
        forward_to: "https://sl.n39.eu/discord"
+

@@ -13,7 +13,7 @@
     - name: Setup the docker container for unifi-controller
      docker_container:
        name: unifi-controller
-        image: jacobalberty/unifi:v7.5.176
+        image: "jacobalberty/unifi:v7.1.65"
        state: started
        restart_policy: unless-stopped
        container_default_behavior: no_defaults

310
host_vars/cisco-2960-1.n39.eu/port-assignments.yml
Normal file
310
host_vars/cisco-2960-1.n39.eu/port-assignments.yml
Normal file
|
@ -0,0 +1,310 @@
|
||||||
|
---
|
||||||
|
ios_interfaces:
|
||||||
|
GigabitEthernet1/0/1:
|
||||||
|
description: MGMT
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 1
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/2:
|
||||||
|
description: MGMT
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 1
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/3:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/4:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/5:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/6:
|
||||||
|
description: USV
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 1
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/7:
|
||||||
|
description: beaker
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: all
|
||||||
|
trunk_native_vlan: 1
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/8:
|
||||||
|
description: beaker
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: all
|
||||||
|
trunk_native_vlan: 1
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/9:
|
||||||
|
description: beaker ipmi
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 1
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/10:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/11:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/12:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/13:
|
||||||
|
description: patchfeld 1 - Switch an Ausleihliste
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: 1,4,5,7,8,11
|
||||||
|
trunk_native_vlan: 4
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/14:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/15:
|
||||||
|
description: patchfeld 2 - Raspberry Pi Platon
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/16:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/17:
|
||||||
|
description: patchfeld 6 - Access Point Hempels Zimmer
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: 1,4,5,7,8,11
|
||||||
|
trunk_native_vlan: 4
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/18:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/19:
|
||||||
|
description: FräsPC
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/20:
|
||||||
|
description: patchfeld 4 - Switch am Basteltisch
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: 1,4,5,7,8,11
|
||||||
|
trunk_native_vlan: 4
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/21:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/22:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/23:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/24:
|
||||||
|
description: lan
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: access
|
||||||
|
access_vlan: 4
|
||||||
|
trunk_allowed_vlans:
|
||||||
|
trunk_native_vlan:
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/25:
|
||||||
|
description: uplink von descartes
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: all
|
||||||
|
trunk_native_vlan: 1
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/26:
|
||||||
|
description: marx
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: all
|
||||||
|
trunk_native_vlan: 1
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/27:
|
||||||
|
description: unconfigured
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: all
|
||||||
|
trunk_native_vlan: 1
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
||||||
|
GigabitEthernet1/0/28:
|
||||||
|
description: unconfigured
|
||||||
|
enabled: true
|
||||||
|
l2:
|
||||||
|
mode: trunk
|
||||||
|
access_vlan:
|
||||||
|
trunk_allowed_vlans: all
|
||||||
|
trunk_native_vlan: 1
|
||||||
|
state: present
|
||||||
|
lines: []
|
||||||
|
state: present
|
host_vars/cisco-2960-1.n39.eu/vars.yml (new file, 7 lines)

@@ -0,0 +1,7 @@
---
ansible_host: 172.23.63.45
ansible_user: admin
ansible_password: "{{ vault_ansible_password }}"
ansible_become_password: "{{ vault_ansible_password }}"
ansible_connection: network_cli
ansible_network_os: ios
host_vars/cisco-2960-1.n39.eu/vault (new file, 8 lines)

@@ -0,0 +1,8 @@
$ANSIBLE_VAULT;1.1;AES256
64656665316637386363313263653532393161656531336262613266363231373537396633386231
3834356536353235356630333766396233626130653237380a396137336262376539373838623762
35396361643263313239386637653330646134616333333961306537306464626461626462626665
3763386531316433390a323533353531363335306663343632326562663334303466663664363530
38613135333336656131646534633839386330323164643338333763373839306566656633633161
62643964343763316264376366636562316336616665663865336633373266353931366336346666
616135333836343436633136636163656138
host_vars/cisco-2960-1.n39.eu/vlans.yml (new file, 23 lines)

@@ -0,0 +1,23 @@
---
ios_vlans:
  1:
    name: MGMT
    state: present
  4:
    name: lan
    state: present
  5:
    name: wan
    state: present
  7:
    name: service
    state: present
  8:
    name: legacy
    state: present
  9:
    name: dmz
    state: present
  11:
    name: ffmd-client
    state: present

@@ -1,2 +0,0 @@
----
-mac: "32:A3:94:A0:23:77"

@@ -9,57 +9,38 @@ cleanuri_amqp_user: "cleanuri"
 cleanuri_amqp_pass: "{{ vault_cleanuri_amqp_pass }}"
 cleanuri_amqp_vhost: "/cleanuri"

-forgejo_host_port: 9091
-forgejo_ssh_port: 2222
-forgejo_domain_name: git.n39.eu
-forgejo_image: codeberg.org/forgejo/forgejo:1.20
-
 shlink_host_port: 8083
 shlink_domain_name: sl.n39.eu
-shlink_image: shlinkio/shlink:3.6.4
 shlink_geolite_license_key: "{{ vault_shlink_geolite_license_key }}"
-shlink_initial_api_key: "{{ vault_shlink_initial_api_key }}"

 hedgedoc_host_port: 8084
-hedgedoc_domain_name: pad.n39.eu
-hedgedoc_image: quay.io/hedgedoc/hedgedoc:1.9.9
-hedgedoc_db_image: postgres:16.0-alpine
+hedgedoc_image: quay.io/hedgedoc/hedgedoc:1.9.3
 hedgedoc_postgres_password: "{{ vault_hedgedoc_postgres_password }}"

 redmine_host_port: 8087
-redmine_domain_name: redmine.n39.eu
-redmine_image: redmine:5.0.6
-redmine_mysql_image: mysql:8.2
+redmine_image: redmine:4.2.7
+redmine_mysql_image: mysql:5.7
 redmine_database: redmine
 redmine_database_password: "{{ vault_redmine_database_password }}"

 influxdb_host_port: 8088
 influxdb_domain_name: influx.n39.eu
-influxdb_image: influxdb:2.7-alpine
+influxdb_image: influxdb:2.4-alpine
 influxdb_init_username: admin
 influxdb_init_password: "{{ vault_influxdb_init_password }}"

-jabber_host_port: 8086
-prosody_domain_name: jabber.n39.eu
-prosody_image: netz39/prosody:0.11
-prosody_web_image: joseluisq/static-web-server:2.23
+forgejo_host_port: 9091
 prosody_config_dir: "/etc/prosody"
 prosody_data_dir: "{{ data_dir }}/prosody"
+prosody_domain_name: jabber.n39.eu
+jabber_host_port: 8086

 uptimekuma_host_port: 8085
-uptimekuma_domain_name: uptime.n39.eu
-uptimekuma_image: louislam/uptime-kuma:1.23.3

 grafana_host_port: 8089
 grafana_domain_name: grafana.n39.eu
-grafana_image: grafana/grafana:10.2.0
 grafana_admin_password: "{{ vault_grafana_admin_password }}"

 homebox_host_port: 8092
 homebox_domain_name: inventory.n39.eu
-homebox_image: ghcr.io/hay-kot/homebox:v0.10.1
-
-renovate_image: renovate/renovate:37.36.2
-renovate_forgejo_pat: "{{ vault_renovate_forgejo_pat }}"
-renovate_github_pat: "{{ vault_renovate_github_pat }}"
-renovate_git_user: "Renovate Bot <accounts+renovatebot@netz39.de>"

@ -1,33 +1,20 @@
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
34373163393834623634633434373737303738383439616433633236363937633764666231373964
|
35323634353263613464653863633861303539636238333464653633653164353632306233626231
|
||||||
6330396333366432343765313738396461333133656236350a356239656636363764303464323737
|
3766386232326433383932636136313831346131336335360a383639643334613033336134373566
|
||||||
32353330336364663833386631303364323861326664633937383132373735373337393431646633
|
36343465336365363732363931383031356532336235313537396338316465366537313032616261
|
||||||
3763323461363561390a393034333364663633396462646632393362613733363239633061616139
|
6634393966623662390a383335316661613332633165333936396335666637306163363133323363
|
||||||
30383461363565346464306263613363396237373233346361333137626539613463336665343134
|
33613639306537396136643438623937363336376332353634333130313434623433303264393461
|
||||||
66623461623634386134636334386561356230373138643039653335353866356630646536376438
|
38663337396465343937396438333261393463303866306234323138396563653837373334356239
|
||||||
33643232313037356261303034383232623863323030616134386530646366636230616432316639
|
64653231633066656662306530656139316530316263356135363538303061646432353338323237
|
||||||
33346637316435353365343434613230353934613262653964643563303938326433373938636539
|
66663161333133313762366261343434666238376537636433313461343065646565633130333061
|
||||||
30643633636134666135393166313334353336343132346564313637333837323036623331326134
|
33376537316338666662643639623637396366336263656537326363663936616234343235373636
|
||||||
32643166663165343739663936356432633739303265333536336365646435313162623638353434
|
33373039373033333533363366356435633863633434643963633664613238363961363733366437
|
||||||
30613335306134623238666261666537366631633133663162346464396138383139613331346436
|
61353936613065303230616239646334313130636133653461663561303037383663643761376235
|
||||||
34363232356163373534393062303137663732366237313732323465613463323837386561666164
|
33303661663063613635306438613738613064386466656430343130356131663262353239326334
|
||||||
37346535393263343661303436353739323364323663626135373330303038383135653564623734
|
63323630333466356263646162336437646133616565353430313737313332363330663236383830
|
||||||
62383337666362653633323130613264303133653731643965666461383030656562373832333731
|
33366138303665386561393136616238346335633163313330386434323239623736333562363862
|
||||||
36333632353763363931326235613164646364306162643533353038613239386632336662346532
|
66636165373264353132626232633537613536303362366535653438303261323735666331363439
|
||||||
38626135323233386462646533633536396235396465643635383834306631613234646336656134
|
61613939373333616364303134353437333965386239623933393932373434666234373736316166
|
||||||
64616530346466666231623432323462623438643333373838396666356465386230383737663731
|
63373935356162326230653437643030313262373965353831396361646136663938336334646633
|
||||||
35356262613064366433363263373033636632363135386531616636313337663536643437623061
|
65313166613131396665393363633166663137363564393063363330366364373936643831373030
|
||||||
66353333376530663765376432323933363730646237646535663533346333633263346563343337
|
333465303435636163616236306264646666
|
||||||
61313461323966393536303961623037353432326632336132306134343332663462666230353732
|
|
||||||
65613832633134386266373337396439333637343139626136636237383632366232363837383539
|
|
||||||
37353965323061336365653366343064653739346363623662373734323065323162353035363938
|
|
||||||
62326664306232356134306563646338666635333531663338396239636539356664333937663636
|
|
||||||
36303032333962653335326261366362373035626463303933393666633832393762326435386361
|
|
||||||
36353065363762363638303833616133663330393532313233306135653034656562626435633834
|
|
||||||
62383833306633346662383439313037633763353737333234373234303962666262316638326461
|
|
||||||
62373765643432663134643561623261326265306437306439353966336364373931376261333963
|
|
||||||
65353938376463313463623037303566366435323938326633353334323731333134353137356165
|
|
||||||
39346563303536356565663333393061613231653565646435373839626235633032316333646566
|
|
||||||
37303232396139653531633836663461623464316332666632363435313566326262666562626130
|
|
||||||
63613937323335336630383261363334396366623161343730623662363533323430326334353063
|
|
||||||
31343938346434613765
|
|
||||||
|
|
|
@@ -3,5 +3,5 @@ server_admin: "admin+radon@netz39.de"
 pwr_meter_amqp_user: "pwr-meter"
 pwr_meter_amqp_pass: "{{ vault_pwr_meter_amqp_pass }}"
 pwr_meter_api_token: "{{ vault_pwr_meter_api_token }}"
-# See https://git.n39.eu/Netz39_Admin/config.descartes/src/branch/live/dns_dhcp.txt
+# See https://gitea.n39.eu/Netz39_Admin/config.descartes/src/branch/live/dns_dhcp.txt
 brotherql_printer_ip: "172.23.48.53"

@@ -8,7 +8,6 @@ all:
     krypton.n39.eu:
     oganesson.n39.eu:
     platon.n39.eu:
-    plumbum.n39.eu:
     pottwal.n39.eu:
     radon.n39.eu:
     unicorn.n39.eu:
@@ -18,6 +17,7 @@ all:
     k3s-w1.n39.eu:
     k3s-w2.n39.eu:
     k3s-w3.n39.eu:
+    cisco-2960-1.n39.eu:

   children:
     docker_host:
@@ -32,7 +32,6 @@ all:
        holmium.n39.eu:
        krypton.n39.eu:
        oganesson.n39.eu:
-        plumbum.n39.eu:
        pottwal.n39.eu:
        radon.n39.eu:
        unicorn.n39.eu:
@@ -50,7 +49,6 @@ all:
        krypton.n39.eu:
        oganesson.n39.eu:
        platon.n39.eu:
-        plumbum.n39.eu:
        pottwal.n39.eu:
        radon.n39.eu:
        wittgenstein.n39.eu:
@@ -72,3 +70,6 @@ all:
        k3s-w1.n39.eu:
        k3s-w2.n39.eu:
        k3s-w3.n39.eu:
+    network:
+      hosts:
+        cisco-2960-1.n39.eu:

@@ -1,23 +0,0 @@
-{
-  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
-  "regexManagers": [
-    {
-      "fileMatch": [
-        "^host-.*.yml$",
-        "^host_vars/.*/vars.yml$"
-      ],
-      "datasourceTemplate": "docker",
-      "versioningTemplate": "docker",
-      "matchStrings": [
-        "image: (?<depName>.*?):(?<currentValue>.*?)(@(?<currentDigest>sha256:.*?))?\\s"
-      ]
-    }
-  ],
-  "packageRules": [
-    {
-      "matchPackageNames": ["renovate/renovate"],
-      "schedule": [ "on friday" ]
-    }
-  ]
-
-}

@@ -1,9 +1,9 @@
 ---
 roles:
   - src: hifis.unattended_upgrades
-    version: v3.2.0
+    version: v1.12.2
   - src: git+https://github.com/adriagalin/ansible.timezone.git
-    version: 4.0.0
+    version: 3.0.0
   - src: git+https://github.com/24367dfa/ansible-role-dehydrated.git
     version: 1.0.3
   - src: penguineer.dehydrated_cron
@@ -15,4 +15,4 @@ roles:

 collections:
   - name: community.grafana
-    version: 1.5.4
+    version: 1.5.3

@@ -20,6 +20,6 @@ cleanuri_amqp_retrieval: "extractor"

 # Docker images
 cleanuri_image_webui: mrtux/cleanuri-webui:0.2.0
-cleanuri_image_apigateway: mrtux/cleanuri-apigateway:0.3.1
-cleanuri_image_canonizer: mrtux/cleanuri-canonizer:0.3.1
+cleanuri_image_apigateway: mrtux/cleanuri-apigateway:0.3.0
+cleanuri_image_canonizer: mrtux/cleanuri-canonizer:0.3.0
 cleanuri_image_extractor: mrtux/cleanuri-extractor:0.3.0

@@ -1,8 +0,0 @@
-#!/bin/bash
-docker run --rm \
-    -e RENOVATE_TOKEN={{ renovate_forgejo_pat }} \
-    -e RENOVATE_ENDPOINT=https://{{ forgejo_domain_name }}/api/v1 \
-    -e RENOVATE_PLATFORM=gitea \
-    -e RENOVATE_GIT_AUTHOR={{ renovate_git_user | quote }} \
-    -e GITHUB_COM_TOKEN={{ renovate_github_pat }} \
-    {{ renovate_image }} --autodiscover