forked from Netz39_Admin/netz39-infra-ansible
Compare commits
No commits in common. "cf9bea015bc5ae436551d321df13f500f749d18e" and "master" have entirely different histories.
Comparing cf9bea015b...master
9 changed files with 4 additions and 162 deletions
.editorconfig

@@ -1,13 +0,0 @@
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true

[*.yml]
indent_style = space
indent_size = 2

README.md

@@ -3,7 +3,6 @@
Currently including the following hosts:
* tau.netz39.de
* pottwal.n39.eu
* unicorn.n39.eu

## Setup
```bash

@@ -76,8 +76,4 @@ all:
        31356239323439666632333033616366663762646366343839663463633665333863343437343334
        3665386538393066380a383366346235343531306561643534663035646537666534383536333230
        34613761363237633865306332653631323366343232353666343165666664343838
    unicorn.n39.eu:
      server_admin: "admin+unicorn@netz39.de"
    platon.n39.eu:
      server_admin: "admin+platon@netz39.de"
      ansible_ssh_user: pi

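The hexadecimal lines in this hunk are the body of an inline ansible-vault value. As a hedged sketch (assumed structure, not copied from this repository), such a value is typically produced with `ansible-vault encrypt_string --name <var> '<value>'` and attached to a host variable roughly like this; the variable name and ciphertext are placeholders:

```yaml
all:
  hosts:
    unicorn.n39.eu:
      server_admin: "admin+unicorn@netz39.de"
      # hypothetical variable name; the ciphertext body is elided here
      some_secret: !vault |
        $ANSIBLE_VAULT;1.1;AES256
        ...ciphertext lines like the hex blocks in the hunk above...
```
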
main.yml (6 changed lines)

@@ -39,9 +39,3 @@

- name: Pottwal specific setup
  import_playbook: pottwal.yml

- name: Specific setup for host unicorn
  import_playbook: unicorn.yml

- name: Platon specific setup
  import_playbook: platon.yml

platon.yml

@@ -1,9 +0,0 @@
---
- hosts: platon.n39.eu
  become: true
  vars:
    ansible_python_interpreter: /usr/bin/python3
    door_open_command: '/home/pi/sesame-open.sh'
    ble_keykeeper_dir: '/home/pi/netz39_ble_keykeeper'
  roles:
    - role: ble-keykeeper-role

pottwal.yml (52 changed lines)

@@ -8,13 +8,12 @@
    mosquitto_image: eclipse-mosquitto:1.6
    mosquitto_data: /srv/data/mosquitto

    openhab_image: openhab/openhab:2.5.11
    openhab_image: openhab/openhab:3.1.0
    openhab_data: /srv/data/openhab
    openhab_host_port: 8081
    openhab_configuration_source: https://github.com/netz39/n39-openhab.git
    openhab_configuration_version: master

    gitea_host_port: 9091
    uritools_host_port: 8080
    entities_validation_svc_host_port: 8082
    shlink_host_port: 8083

@@ -27,50 +26,6 @@
    - role: apache

  tasks:

    - name: Check if gitea data dir exists
      ansible.builtin.stat:
        path: "/srv/data/gitea"
      register: gitea_dir
    - name: Fail if gitea data dir does not exist
      ansible.builtin.fail:
        msg: "Gitea data dir is missing, please restore from the backup!"
      when: not gitea_dir.stat.exists

    # If port 2222 is changed here, it must also be adapted
    # in the gitea config file (see application volume)!!
    - name: Setup the docker container for gitea
      docker_container:
        name: gitea
        image: "gitea/gitea:1.15.10"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: yes
        ports:
          # - 127.0.0.1:{{ gitea_host_port }}:3000
          - "{{ gitea_host_port }}:3000"
          - 2222:2222
        env:
          APP_NAME="Netz39 Gitea"
          RUN_MODE="prod"
          SSH_DOMAIN="gitea.n39.eu"
          SSH_PORT="2222"
          SSH_START_SERVER="false"
          ROOT_URL="https://gitea.n39.eu"
          DISABLE_REGISTRATION="true"
          USER_UID=1000
          USER_GID=1000
        volumes:
          - "/srv/data/gitea:/data:rw"

    - name: Setup proxy site gitea.n39.eu
      include_role:
        name: setup-http-site-proxy
      vars:
        site_name: "gitea.n39.eu"
        proxy_port: "{{ gitea_host_port }}"

    - name: Ensure apt-cacher container is running
      docker_container:
        name: apt_cacher_ng

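A hedged side note, not part of this diff: the fail-plus-when pair in the hunk above could equivalently be expressed with ansible.builtin.assert, which bundles the condition and the error message into a single task:

```yaml
# Sketch under the assumption that the same guard semantics are wanted:
# stat the directory, then assert on the result instead of fail + when.
- name: Check if gitea data dir exists
  ansible.builtin.stat:
    path: "/srv/data/gitea"
  register: gitea_dir

- name: Fail if gitea data dir does not exist
  ansible.builtin.assert:
    that: gitea_dir.stat.exists
    fail_msg: "Gitea data dir is missing, please restore from the backup!"
```
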
@@ -82,7 +37,6 @@
        ports:
          - 3142:3142


    - name: Ensure the mosquitto directories exist
      file:
        path: "{{ item }}"

@@ -216,8 +170,8 @@
        name: setup-http-site-proxy
      vars:
        site_name: "{{ shlink_domain_name }}"
        proxy_port: "{{ shlink_host_port }}"

        proxy_port: "{{ shlink_host_port }}"

  handlers:
    - name: restart mosquitto
      docker_container:

requirements.yml

@@ -5,5 +5,3 @@
  version: 3.0.0
- src: git+https://github.com/24367dfa/ansible-role-dehydrated.git
  version: 1.0.2
- src: git+https://github.com/maz3max/ble-keykeeper-role.git
  version: v1.0.2

tau.yml (45 changed lines)

@@ -5,9 +5,6 @@
  vars:
    ansible_python_interpreter: /usr/bin/python3

    docker_registry_port: 5000 # this is the reg standard port
    docker_registry_domain: "docker.n39.eu"

  roles:
    - role: docker_setup
    - role: apache

@@ -34,8 +31,6 @@
          deploy_challenge_hook: "/bin/systemctl restart apache2"
        - name: "mysql.adm.netz39.de"
          deploy_challenge_hook: "/bin/systemctl restart apache2"
        - name: "{{ docker_registry_domain }}"
          deploy_challenge_hook: "/bin/systemctl restart apache2"

    - name: Setup forward site reservierung.netz39.de
      include_role:

@@ -73,43 +68,3 @@
      vars:
        site_name: mysql.adm.netz39.de
        proxy_port: 9001


    - name: Check if Docker Registry auth dir exists
      ansible.builtin.stat:
        path: "/srv/docker/registry/auth"
      register: docker_dir
    - name: Fail if docker registry data dir does not exist
      ansible.builtin.fail:
        msg: "Docker Registry auth dir is missing, please restore from the backup!"
      when: not docker_dir.stat.exists
    - name: Ensure the Docker Registry data directory exists
      # This may not be part of the backup
      file:
        path: "/srv/docker/registry/data"
        state: directory

    - name: Setup Docker Registry Container
      docker_container:
        name: registry
        image: "registry:2"
        pull: true
        state: started
        restart_policy: unless-stopped
        detach: yes
        ports:
          - 127.0.0.1:{{ docker_registry_port }}:{{ docker_registry_port }}
        env:
          REGISTRY_HTTP_HOST: "https://{{ docker_registry_domain }}"
          REGISTRY_AUTH_HTPASSWD_REALM: "Netz39 Docker Registry"
          REGISTRY_AUTH_HTPASSWD_PATH: "/auth/htpasswd"
        volumes:
          - "/srv/docker/registry/data:/var/lib/registry:rw"
          - "/srv/docker/registry/auth:/auth:rw"

    - name: Setup proxy site for the Docker Registry
      include_role:
        name: setup-http-site-proxy
      vars:
        site_name: "{{ docker_registry_domain }}"
        proxy_port: "{{ docker_registry_port }}"

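The registry container above expects an htpasswd file at /srv/docker/registry/auth/htpasswd (see REGISTRY_AUTH_HTPASSWD_PATH). As a hedged sketch, purely an assumption and not part of this repository, such a file could be provisioned with the community.general.htpasswd module; the user name and password variable are placeholders:

```yaml
# Sketch only: creates the entry the registry's htpasswd auth reads.
- name: Ensure a registry user exists in the htpasswd file
  community.general.htpasswd:
    path: /srv/docker/registry/auth/htpasswd
    name: registry-user                   # hypothetical user name
    password: "{{ registry_password }}"   # hypothetical (vaulted) variable
    crypt_scheme: bcrypt                  # the Docker registry only accepts bcrypt entries
```
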
unicorn.yml (32 changed lines)

@@ -1,32 +0,0 @@
---
# this is for a dedicated vm just hosting the unifi controller.
- hosts: unicorn.n39.eu
  become: true
  vars:
    ansible_python_interpreter: /usr/bin/python3
  roles:
    - role: docker_setup
      vars:
        docker_data_root: "/srv/docker"
  tasks:
    - name: Setup the docker container for unifi-controller
      docker_container:
        name: unifi-controller
        image: "jacobalberty/unifi:v6.5.55"
        state: started
        restart_policy: unless-stopped
        container_default_behavior: no_defaults
        env:
          TZ: "Europe/Berlin"
        # These fixed ports are needed.
        # https://help.ui.com/hc/en-us/articles/218506997-UniFi-Ports-Used
        ports:
          - "8080:8080/tcp" # Device command/control
          - "8443:8443/tcp" # Web interface + API
          - "8843:8843/tcp" # HTTPS portal
          - "8880:8880/tcp" # HTTP portal
          - "3478:3478/udp" # STUN service
          - "6789:6789/tcp" # Speed Test (unifi5 only)
        volumes:
          - "/srv/data/unifi-controller/data:/unifi/data"
          - "/srv/data/unifi-controller/log:/unifi/log"