Compare commits
233 Commits
424fe250d8
...
main
Author | SHA1 | Date | |
---|---|---|---|
230d830612 | |||
f843c7eaa3 | |||
076757e1f8 | |||
0b1c18a3a0 | |||
f47ad625da | |||
2b56d30666 | |||
c12dfc18ce | |||
814a642cc0 | |||
8cad395e34 | |||
eb360951a1 | |||
9601aa4937 | |||
81319370b1 | |||
76f6f78112 | |||
9b0edab903 | |||
c377f1a7d1 | |||
7f5a35d936 | |||
7b9f0e0ca5 | |||
a490e4ad92 | |||
6734d78bef | |||
6722ab4138 | |||
e76d1a1f88 | |||
c090cc9cbe | |||
8ab3783a2b | |||
cdf20ba9ef | |||
f0b3388e8d | |||
27e2fc6058 | |||
b622bb29df | |||
bde6a5f208 | |||
85d6fe5056 | |||
098f63fa5b | |||
43fc89a966 | |||
7aa2992228 | |||
1775e24a45 | |||
d6983b4744 | |||
29cb12a2d1 | |||
9464737fe9 | |||
14fc10a10a | |||
fe38bebbd5 | |||
bad78681c6 | |||
c8ab4633ca | |||
627343b50f | |||
2d31a5524f | |||
2981bdb22f | |||
84930795b6 | |||
f068c9710b | |||
afc0b57cfb | |||
7df41b5c8d | |||
2cc78654fe | |||
a6eb508cf0 | |||
85330c8645 | |||
c05f3a845b | |||
3d9241b475 | |||
cb4abe5722 | |||
d0c1bb8717 | |||
5b83607fe0 | |||
43dbb951fe | |||
f68c6b227a | |||
8d049f3056 | |||
a0997ee8ec | |||
b8c2dae1fa | |||
c2f7590b44 | |||
d8db6ba755 | |||
d6882bd306 | |||
f78dd67cd5 | |||
2aa50b4015 | |||
4dfe68a54b | |||
bdf04302aa | |||
39cb2b0007 | |||
f10ce63569 | |||
52c455d490 | |||
c6755e8d97 | |||
ba7cda511e | |||
7eddbba696 | |||
92df824252 | |||
9e07845208 | |||
8d71ff222a | |||
117b36842c | |||
2b2486f2fb | |||
2e38d3d07f | |||
af13cfbb41 | |||
cd19a7687c | |||
0923148d8e | |||
dda7bc7a10 | |||
9c477f2094 | |||
e1349b2b90 | |||
cffbcaea8c | |||
e1fb6b94ee | |||
a2ec933cf8 | |||
24f3a7c485 | |||
f00093ef8e | |||
32ba17ea33 | |||
8f22f5429a | |||
2769a3430b | |||
84a20416e3 | |||
621d9595f8 | |||
5f1e304301 | |||
df3587032d | |||
5007f0b68e | |||
590a50cd1a | |||
1df2adffdb | |||
ddfccdfe96 | |||
0c091aba7e | |||
ef418f2839 | |||
ff0d769aa5 | |||
e4c5846353 | |||
afedcf16d5 | |||
ff8e0581ec | |||
ce3af85e73 | |||
3fa49df87f | |||
11115d515e | |||
657ae3fa91 | |||
14a126afa0 | |||
5eb52e7adb | |||
a86cb26010 | |||
a985895225 | |||
b352a796e0 | |||
0a6a3e5371 | |||
ce9f7891fa | |||
9fd4c6f001 | |||
4ff4a3198e | |||
970576cbb9 | |||
77cfa06ab1 | |||
74ba8cad51 | |||
5111482194 | |||
1f7bc2cfb6 | |||
8d7d005431 | |||
678da5e314 | |||
5c72b57d9c | |||
6b7e1182b0 | |||
4a7a11aad7 | |||
bcae7303ab | |||
f230db3739 | |||
3e70f70fa3 | |||
e5fdf35669 | |||
47266ef6b7 | |||
a62240b02a | |||
5bd82a4d33 | |||
d9a28d21a8 | |||
4d4977b164 | |||
73982fd7b7 | |||
75e78ea7c8 | |||
09a2662c50 | |||
7608d86cde | |||
7710d6b097 | |||
a8edbf45d6 | |||
9fb0635789 | |||
cce06a515b | |||
028fe4b8c4 | |||
a685f44139 | |||
d663591e2a | |||
a046ade37b | |||
9fb6bcfab8 | |||
ecdaad56af | |||
6e2a91013a | |||
103caae226 | |||
a61705b9a3 | |||
92ac1dd6c1 | |||
b98e368259 | |||
a07565128e | |||
29e722d1b5 | |||
6020b9771c | |||
a60c397d1b | |||
37da64cacf | |||
7662f4a11f | |||
4abb4a929f | |||
a7f197eda2 | |||
c8abd13f86 | |||
263907032b | |||
c9d7c99f77 | |||
13148f3e69 | |||
2093fca5d2 | |||
50c451e94d | |||
73c9106c8f | |||
a69ac7f0cd | |||
83346eb842 | |||
7e3fefd220 | |||
c0553c7d22 | |||
0a969fddb4 | |||
07cbae13f7 | |||
0ada17f736 | |||
f648ce0448 | |||
595c2f177e | |||
300ca326ee | |||
8cbfb1a2c6 | |||
c6eb329c73 | |||
7ce5e21906 | |||
239ababf1d | |||
03700ba0fb | |||
ba5672b72a | |||
d25e57ee9f | |||
c84eb30443 | |||
8daf2d1ff8 | |||
09d6c93224 | |||
8aa59da325 | |||
686dde0156 | |||
d1508efd3f | |||
fb6a488dac | |||
31157afa57 | |||
020efcc3bb | |||
ae8633767a | |||
2cfb3b180f | |||
dac4962ace | |||
92a04af990 | |||
08d55c4f22 | |||
ea117c30f8 | |||
e7870f6ef6 | |||
1166e932c5 | |||
140749e34c | |||
21679647c6 | |||
63116af524 | |||
450d60cd46 | |||
06ea234395 | |||
b8fb28d748 | |||
26abd5af81 | |||
ccad767c43 | |||
f397225862 | |||
0a49861be6 | |||
c89c678ea0 | |||
9cdb8b12b7 | |||
7ff7a36eb3 | |||
acffc9c72c | |||
08541101b7 | |||
8df44f880d | |||
e5e3ed6931 | |||
0548f903a8 | |||
873cf1446e | |||
92d3178d90 | |||
fdc437c60f | |||
e04d80e08b | |||
5d891e290b | |||
6143769a3f | |||
6c88c4af86 | |||
2c24ade913 |
41
.gitignore
vendored
41
.gitignore
vendored
@ -0,0 +1,41 @@
|
||||
# Local .terraform directories
|
||||
**/.terraform/*
|
||||
**/.terraform
|
||||
|
||||
.ansible/
|
||||
.vscode/
|
||||
|
||||
ansible/collections/**
|
||||
|
||||
# registry password file
|
||||
distribution/htpasswd
|
||||
|
||||
# .tfstate files
|
||||
*.tfstate
|
||||
*.tfstate.*
|
||||
|
||||
# Terraform lock file
|
||||
**/.terraform.lock.hcl
|
||||
|
||||
# Terraform secrets file
|
||||
**/secrets.auto.tfvars
|
||||
|
||||
# Crash log files
|
||||
crash.log
|
||||
crash.*.log
|
||||
|
||||
# Include override files you do wish to add to version control using negated pattern
|
||||
# !example_override.tf
|
||||
|
||||
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
|
||||
# example: *tfplan*
|
||||
|
||||
# Ignore CLI configuration files
|
||||
.terraformrc
|
||||
terraform.rc
|
||||
|
||||
**/vault_password
|
||||
**/vault.yaml
|
||||
**/*secrets.yaml
|
||||
**/*secret.yaml
|
||||
.vscode/*
|
||||
|
10
.sops.yaml
Normal file
10
.sops.yaml
Normal file
@ -0,0 +1,10 @@
|
||||
creation_rules:
|
||||
- path_regex: (secret|secrets)\.(yml|yaml)$
|
||||
unencrypted_regex: ^(apiVersion|kind|name|namespace|type)$
|
||||
kms: 'arn:aws:kms:us-east-1:140023401248:key/c51c2cc5-4e8e-484d-b2f0-4d4ec2039938'
|
||||
# kms:
|
||||
# - arn: 'arn:aws:kms:us-east-1:140023401248:key/c51c2cc5-4e8e-484d-b2f0-4d4ec2039938'
|
||||
# aws_profile: home
|
||||
age: 'age1k5y5gj5fzpwtjgzqd4n93h4h9ek9jz8898rva5zsgj7zjet97ytq4dtzjs'
|
||||
hc_vault_transit_uri: 'https://vault.balsillie.net:443/v1/sops/keys/krds'
|
||||
|
27
.vscode/settings.json
vendored
Normal file
27
.vscode/settings.json
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
{
|
||||
"yaml.schemas": {
|
||||
"https://raw.githubusercontent.com/ansible/schemas/main/f/ansible.json": "file:///home/michael/Code/home/IaC/ansible/roles/vm_deploy/tasks/deploy.yaml",
|
||||
"kubernetes://schema/storage.k8s.io/v1@storageclass": "file:///home/michael/Code/home/IaC/ansible/roles/k8s_storage_deploy/files/config/blockpool_ssd_replica.yaml"
|
||||
},
|
||||
"vs-kubernetes": {
|
||||
|
||||
"vs-kubernetes.namespace": "",
|
||||
"disable-linters": ["resource-limits"],
|
||||
"vs-kubernetes.kubectl-path": "",
|
||||
"vs-kubernetes.helm-path": "",
|
||||
"vs-kubernetes.minikube-path": "",
|
||||
"vs-kubernetes.kubectlVersioning": "user-provided",
|
||||
"vs-kubernetes.outputFormat": "yaml",
|
||||
"vs-kubernetes.kubeconfig": "",
|
||||
"vs-kubernetes.knownKubeconfigs": [],
|
||||
"vs-kubernetes.autoCleanupOnDebugTerminate": false,
|
||||
"vs-kubernetes.nodejs-autodetect-remote-root": true,
|
||||
"vs-kubernetes.nodejs-remote-root": "",
|
||||
"vs-kubernetes.nodejs-debug-port": 9229,
|
||||
"vs-kubernetes.dotnet-vsdbg-path": "~/vsdbg/vsdbg",
|
||||
"vs-kubernetes.local-tunnel-debug-provider": "",
|
||||
"checkForMinikubeUpgrade": true,
|
||||
"imageBuildTool": "Docker"
|
||||
},
|
||||
"ansible.python.interpreterPath": "/usr/bin/python"
|
||||
}
|
@ -1,5 +0,0 @@
|
||||
Packer for creating immutable images.
|
||||
|
||||
Terraform for deploying VMs based on those images.
|
||||
|
||||
Ansible for configuring mutable systems and applications.
|
@ -1,11 +1,11 @@
|
||||
[defaults]
|
||||
inventory = ./inventory/hosts.yaml
|
||||
inventory = ./inventory/
|
||||
jinja2_extensions = jinja2.ext.do,jinja2.ext.i18n
|
||||
library = modules
|
||||
module_utils = module_utils
|
||||
display_skipped_hosts = false
|
||||
interpreter_python = auto_silent
|
||||
collections_paths = ./collections
|
||||
collections_path = ./collections
|
||||
roles_path = ./roles
|
||||
vault_password_file = ../.vault_password.txt
|
||||
vault_password_file = ./vault_password
|
||||
playbook_dir = ./playbooks/
|
||||
|
6
ansible/inventory/group_vars/all/acme_certificate.yaml
Normal file
6
ansible/inventory/group_vars/all/acme_certificate.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
acme_certificate_csr_organization: Balsillie Family
|
||||
acme_certificate_csr_locality: Queenstown
|
||||
acme_certificate_csr_state: Otago
|
||||
acme_certificate_csr_country: NZ
|
||||
acme_certificate_csr_email: admin@balsillie.net
|
||||
acme_certificate_directory: https://acme-v02.api.letsencrypt.org/directory
|
3
ansible/inventory/group_vars/all/nut.yaml
Normal file
3
ansible/inventory/group_vars/all/nut.yaml
Normal file
@ -0,0 +1,3 @@
|
||||
nut_client_admin_username: nut-admin
|
||||
nut_client_primary_username: nut-primary
|
||||
nut_client_secondary_username: nut-secondary
|
3
ansible/inventory/group_vars/all/rfc2136.yaml
Normal file
3
ansible/inventory/group_vars/all/rfc2136.yaml
Normal file
@ -0,0 +1,3 @@
|
||||
rfc2136_key_algorithm: hmac-sha256
|
||||
rfc2136_key_name: rndc-house
|
||||
rfc2136_server_address: 10.208.240.1
|
@ -0,0 +1,23 @@
|
||||
# code: language=ansible
|
||||
|
||||
aur_repo_packager_name: "Balsillie Family"
|
||||
aur_repo_packager_email: "admin@balsillie.net"
|
||||
aur_repo_dir: "/aur"
|
||||
|
||||
aur_repo_build_account: "aur-builder"
|
||||
|
||||
aur_repo_host_packages:
|
||||
- pikaur
|
||||
- jellyfin-media-player # If you get errors relating to icu, check 'icu' package version and perform a system update
|
||||
- git-credential-keepassxc
|
||||
- docker-credential-secretservice-bin
|
||||
- ventoy-bin
|
||||
- debtap
|
||||
- aurutils
|
||||
- ipmiview
|
||||
- powershell-bin
|
||||
- visual-studio-code-bin
|
||||
- ttf-ms-fonts
|
||||
- brave-bin
|
||||
- teamviewer
|
||||
- vmware-horizon-client
|
25
ansible/inventory/group_vars/k8s_storage/k8s_storage.yaml
Normal file
25
ansible/inventory/group_vars/k8s_storage/k8s_storage.yaml
Normal file
@ -0,0 +1,25 @@
|
||||
---
|
||||
|
||||
zfs_pools:
|
||||
- name: ssd
|
||||
ashift: 16
|
||||
recordsize: 64k
|
||||
type: ""
|
||||
disks: /dev/vde
|
||||
compression: "off"
|
||||
datasets:
|
||||
- name: ssd/data
|
||||
encrypt: false
|
||||
- name: ssd/data/open-ebs
|
||||
encrypt: false
|
||||
- name: hdd
|
||||
ashift: 12
|
||||
recordsize: 64k
|
||||
type: mirror
|
||||
disks: /dev/sda /dev/sdb
|
||||
compression: "off"
|
||||
datasets:
|
||||
- name: hdd/data
|
||||
encrypt: true
|
||||
- name: hdd/data/open-ebs
|
||||
encrypt: false
|
@ -0,0 +1,17 @@
|
||||
# code: language=ansible
|
||||
|
||||
# Connection (SSH)
|
||||
|
||||
ansible_connection: ansible.builtin.ssh
|
||||
ansible_ssh_host: dev.balsillie.house
|
||||
ansible_ssh_port: 22
|
||||
ansible_ssh_host_key_checking: false
|
||||
ansible_ssh_pipelining: false
|
||||
ansible_ssh_user: ladmin
|
||||
ansible_ssh_private_key_file: ~/.ssh/conf.d/home/dev.balsillie.house.key
|
||||
|
||||
# Become (sudo)
|
||||
|
||||
ansible_become_method: ansible.builtin.sudo
|
||||
ansible_become_user: root
|
||||
ansible_become_password: "{{ lookup('community.hashi_vault.vault_kv1_get', 'ansible/host_vars/dev.balsillie.house/ansible_connection').secret.ansible_become_password }}" # noqa yaml[line-length]
|
17
ansible/inventory/host_vars/dev.balsillie.house/certbot.yaml
Normal file
17
ansible/inventory/host_vars/dev.balsillie.house/certbot.yaml
Normal file
@ -0,0 +1,17 @@
|
||||
# code: language=ansible
|
||||
|
||||
certbot_rfc2136_server: '10.208.240.1'
|
||||
certbot_rfc2136_key_name: 'rndc-house'
|
||||
certbot_rfc2136_key_algorithm: 'hmac-sha256'
|
||||
|
||||
certbot_cloudflare_api_token: "{{ lookup('community.hashi_vault.vault_kv1_get', 'cloudflare/balsillie.house/dns').secret.api_token }}" # noqa yaml[line-length]
|
||||
|
||||
certbot_dns_propagation_seconds: 15
|
||||
|
||||
certbot_webserver_type: 'nginx' # 'nginx' or 'apache'
|
||||
certbot_dns_plugin: 'cloudflare'
|
||||
certbot_email: "certbot.dev@balsillie.email"
|
||||
certbot_acme_server: "acme-v02.api.letsencrypt.org"
|
||||
|
||||
certbot_domains:
|
||||
- repo.balsillie.house
|
@ -0,0 +1,9 @@
|
||||
# code: language=ansible
|
||||
|
||||
nginx_sites:
|
||||
- name: repo.balsillie.house
|
||||
type: site
|
||||
autoindex: 'on'
|
||||
root: /var/www/aur
|
||||
|
||||
nginx_user: "http"
|
@ -0,0 +1 @@
|
||||
acme_certificate_account_email: acme.hv00@balsillie.email
|
@ -0,0 +1,9 @@
|
||||
ansible_connection: ssh
|
||||
ansible_host: hv00.balsillie.house
|
||||
ansible_fqdn: hv00.balsillie.house
|
||||
ansible_remote_addr: 10.192.110.100
|
||||
ansible_port: 22
|
||||
ansible_user: ladmin
|
||||
# ansible_become_user: root
|
||||
ansible_become_method: ansible.builtin.sudo
|
||||
static_fqdn: hv00.balsillie.house
|
@ -0,0 +1,8 @@
|
||||
certbot_rfc2136_server: '10.208.240.1'
|
||||
certbot_rfc2136_key_name: 'rndc-house'
|
||||
certbot_rfc2136_key_algorithm: 'hmac-sha256'
|
||||
|
||||
certbot_webserver_type: 'nginx' # 'nginx' or 'apache'
|
||||
certbot_dns_plugin: 'rfc2136'
|
||||
certbot_email: "certbot.hv00@balsillie.email"
|
||||
certbot_acme_server: "acme-v02.api.letsencrypt.org"
|
@ -0,0 +1,5 @@
|
||||
hypervisor:
|
||||
storage: dir
|
||||
|
||||
qemu_bridges:
|
||||
- br0
|
17
ansible/inventory/host_vars/hv00.balsillie.house/nginx.yaml
Normal file
17
ansible/inventory/host_vars/hv00.balsillie.house/nginx.yaml
Normal file
@ -0,0 +1,17 @@
|
||||
nginx_sites:
|
||||
- name: repo.balsillie.house
|
||||
type: site
|
||||
autoindex: 'on'
|
||||
root: /var/www/aur
|
||||
- name: unifi.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 8989
|
||||
- name: hv00.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 9443
|
||||
|
||||
nginx_user: "http"
|
@ -0,0 +1,38 @@
|
||||
nut_client_local_server: true
|
||||
nut_client_shutdown_cmd: /usr/bin/poweroff
|
||||
nut_client_shutdown_exit: "true"
|
||||
nut_client_hostsync: 240
|
||||
nut_client_notify_cmd: /scripts/notify.sh
|
||||
nut_client_min_supplies: 1
|
||||
nut_client_ups_devices:
|
||||
- name: ups0
|
||||
host: hv00.balsillie.house
|
||||
type: primary
|
||||
port: 3493
|
||||
powervalue: 1
|
||||
nut_client_notify_messages:
|
||||
- name: SHUTDOWN
|
||||
message: "UPSMON shutdown triggered for HV00."
|
||||
- name: LOWBATT
|
||||
message: "UPS has reached low battery condition."
|
||||
nut_client_notify_flags:
|
||||
- name: LOWBATT
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: FSD
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: COMMOK
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: COMMBAD
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: SHUTDOWN
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: REPLBATT
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: NOCOMM
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: NOPARENT
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: BYPASS
|
||||
flags: SYSLOG+WALL+EXEC
|
||||
- name: NOTBYPASS
|
||||
flags: SYSLOG+WALL+EXEC
|
@ -0,0 +1,7 @@
|
||||
nut_server_listen_address: 10.192.110.100
|
||||
nut_server_listen_port: 3493
|
||||
nut_server_certificate_file: /etc/ssl/private/hv00.balsillie.house.plain.combined.pem
|
||||
nut_server_ups_devices:
|
||||
- name: ups0
|
||||
driver: usbhid-ups
|
||||
port: auto
|
@ -0,0 +1 @@
|
||||
console_device: ttyS0
|
@ -0,0 +1,16 @@
|
||||
sshd:
|
||||
config_path: home
|
||||
auth:
|
||||
pubkey: 'yes'
|
||||
password: 'no'
|
||||
empty: 'no'
|
||||
listen:
|
||||
port: '22'
|
||||
family: inet
|
||||
ipv4:
|
||||
- '192.168.1.250'
|
||||
- '10.192.110.100'
|
||||
forwarding:
|
||||
agent: 'no'
|
||||
x11: 'no'
|
||||
nickname: vault
|
@ -0,0 +1,82 @@
|
||||
systemd_networkd_configs:
|
||||
- name: 00-eth0.link
|
||||
src: ethernet.link.j2
|
||||
mac_address: 64-62-66-21-e9-c3
|
||||
- name: 00-eth1.link
|
||||
src: ethernet.link.j2
|
||||
mac_address: 64-62-66-21-e9-c4
|
||||
- name: 00-eth2.link
|
||||
src: ethernet.link.j2
|
||||
mac_address: 64-62-66-21-e9-c5
|
||||
- name: 00-wan.link
|
||||
src: ethernet.link.j2
|
||||
mac_address: 64-62-66-21-e9-c6
|
||||
- name: 01-eth0.network
|
||||
src: ethernet.network.j2
|
||||
mac_address: 64-62-66-21-e9-c3
|
||||
arp: false
|
||||
lldp: true
|
||||
dhcp: false
|
||||
bridge:
|
||||
name: br0
|
||||
vlans:
|
||||
- 110
|
||||
- 210
|
||||
pvid: 210
|
||||
- name: 01-eth1.network
|
||||
src: ethernet.network.j2
|
||||
mac_address: 64-62-66-21-e9-c4
|
||||
arp: false
|
||||
lldp: true
|
||||
dhcp: false
|
||||
bridge:
|
||||
name: br0
|
||||
vlans:
|
||||
- 210
|
||||
pvid: 210
|
||||
- name: 01-eth2.network
|
||||
src: ethernet.network.j2
|
||||
mac_address: 64-62-66-21-e9-c5
|
||||
arp: false
|
||||
lldp: true
|
||||
dhcp: false
|
||||
bridge:
|
||||
name: br0
|
||||
vlans:
|
||||
- 30
|
||||
- 210
|
||||
- 220
|
||||
- 230
|
||||
- name: 01-wan.network
|
||||
src: ethernet.network.j2
|
||||
mac_address: 64-62-66-21-e9-c6
|
||||
arp: true
|
||||
lldp: false
|
||||
dhcp: true
|
||||
- name: 10-br0.netdev
|
||||
src: bridge.netdev.j2
|
||||
vlan_filtering: true
|
||||
stp: true
|
||||
- name: 11-br0.network
|
||||
src: bridge.network.j2
|
||||
arp: false
|
||||
dhcp: false
|
||||
lldp: true
|
||||
vlans:
|
||||
- 110
|
||||
- name: 20-vlan110.netdev
|
||||
src: vlan.netdev.j2
|
||||
vlan_id: 110
|
||||
- name: 21-vlan110.network
|
||||
src: vlan.network.j2
|
||||
arp: true
|
||||
lldp: true
|
||||
dhcp: false
|
||||
address:
|
||||
ipv4:
|
||||
- 10.192.110.100/24
|
||||
gateway:
|
||||
ipv4: 10.192.110.254
|
||||
nameserver:
|
||||
ipv4:
|
||||
- 10.192.110.254
|
@ -0,0 +1,9 @@
|
||||
ansible_connection: ssh
|
||||
ansible_host: kodi00.balsillie.house
|
||||
ansible_fqdn: kodi00.balsillie.house
|
||||
ansible_remote_addr: 10.192.210.169
|
||||
ansible_port: 22
|
||||
ansible_user: ladmin
|
||||
ansible_become_user: root
|
||||
ansible_become_method: sudo
|
||||
static_fqdn: kodi00.balsillie.house
|
@ -0,0 +1,8 @@
|
||||
certbot_rfc2136_server: '10.208.240.1'
|
||||
certbot_rfc2136_key_name: 'rndc-house'
|
||||
certbot_rfc2136_key_algorithm: 'hmac-sha256'
|
||||
|
||||
certbot_webserver_type: 'nginx' # 'nginx' or 'apache'
|
||||
certbot_dns_plugin: 'rfc2136'
|
||||
certbot_email: "certbot.kodi00@balsillie.email"
|
||||
certbot_acme_server: "acme-v02.api.letsencrypt.org"
|
@ -0,0 +1,81 @@
|
||||
---
|
||||
|
||||
docker_users:
|
||||
- ladmin
|
||||
|
||||
docker_networks:
|
||||
- name: torrent
|
||||
driver: bridge
|
||||
driver_options:
|
||||
# com.docker.network.bridge.name: docker-torrent
|
||||
com.docker.network.bridge.enable_ip_masquerade: true
|
||||
com.docker.network.bridge.enable_icc: true
|
||||
# com.docker.network.container_iface_prefix: container-torrent
|
||||
attachable: true
|
||||
enable_ipv6: false
|
||||
internal: false
|
||||
ipam:
|
||||
- subnet: 192.168.99.0/24
|
||||
gateway: 192.168.99.254
|
||||
|
||||
docker_volumes:
|
||||
- name: torrent-data
|
||||
driver: local
|
||||
driver_options:
|
||||
type: none
|
||||
device: /downloads
|
||||
o: bind
|
||||
- name: torrent-config
|
||||
driver: local
|
||||
driver_options:
|
||||
type: none
|
||||
device: /etc/qbittorrent
|
||||
o: bind
|
||||
|
||||
docker_images:
|
||||
- name: hotio/qbittorrent
|
||||
tag: release
|
||||
|
||||
docker_containers:
|
||||
- name: qbittorrent
|
||||
image: hotio/qbittorrent:release
|
||||
auto_remove: false
|
||||
capabilities:
|
||||
- NET_ADMIN
|
||||
domainname: balsillie.house
|
||||
env:
|
||||
PUID: '968'
|
||||
PGID: '968'
|
||||
UMASK: '002'
|
||||
TZ: Pacific/Auckland
|
||||
WEBUI_PORTS: 8080/tcp
|
||||
VPN_ENABLED: 'true'
|
||||
VPN_CONF: 'wg0'
|
||||
VPN_PROVIDER: 'proton'
|
||||
VPN_LAN_NETWORK: ''
|
||||
VPN_LAN_LEAK_ENABLED: 'false'
|
||||
VPN_EXPOSE_PORTS_ON_LAN: ''
|
||||
VPN_AUTO_PORT_FORWARD: 'true'
|
||||
VPN_AUTO_PORT_FORWARD_TO_PORTS: ''
|
||||
VPN_KEEP_LOCAL_DNS: 'false'
|
||||
VPN_FIREWALL_TYPE: 'nftables'
|
||||
VPN_HEALTHCHECK_ENABLED: 'true'
|
||||
PRIVOXY_ENABLED: 'false'
|
||||
UNBOUND_ENABLED: 'false'
|
||||
etc_hosts:
|
||||
tv.balsillie.house: 192.168.99.254
|
||||
movies.balsillie.house: 192.168.99.254
|
||||
hostname: torrent
|
||||
networks:
|
||||
- name: torrent
|
||||
aliases:
|
||||
- torrent
|
||||
- qbittorrent
|
||||
ipv4_address: 192.168.99.1
|
||||
restart_policy: 'unless-stopped'
|
||||
sysctls:
|
||||
net.ipv4.conf.all.src_valid_mark: 1
|
||||
net.ipv6.conf.all.disable_ipv6: 1
|
||||
volumes:
|
||||
- torrent-config:/config:rw
|
||||
- torrent-data:/downloads:rw
|
@ -0,0 +1,43 @@
|
||||
nginx_sites:
|
||||
- name: tv.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 8989
|
||||
- name: movies.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 7878
|
||||
- name: music.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 8686
|
||||
- name: subs.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 6767
|
||||
- name: index.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 9696
|
||||
- name: torrent.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 192.168.99.1
|
||||
port: 8080
|
||||
- name: jellyfin.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 8096
|
||||
- name: kodi.balsillie.house
|
||||
type: proxy
|
||||
upstream:
|
||||
host: 127.0.0.1
|
||||
port: 8082
|
||||
|
||||
nginx_user: "http"
|
@ -0,0 +1,3 @@
|
||||
---
|
||||
|
||||
sonarr_var: "sonarr_value"
|
@ -0,0 +1,4 @@
|
||||
sshd:
|
||||
auth:
|
||||
password: 'no'
|
||||
pubkey: 'yes'
|
@ -0,0 +1,7 @@
|
||||
torrent_user: kodi
|
||||
torrent_downloads_dir: /downloads
|
||||
|
||||
torrent_wireguard_address: 10.2.0.2
|
||||
torrent_wireguard_dns: 10.2.0.1
|
||||
torrent_wireguard_peer_endpoint: 103.75.11.18
|
||||
torrent_wireguard_peer_public_key: 8Rm0uoG0H9BcSuA67/5gBv8tJgFZXNLm4sqEtkB9Nmw=
|
21
ansible/inventory/host_vars/kodi00.balsillie.house/ufw.yaml
Normal file
21
ansible/inventory/host_vars/kodi00.balsillie.house/ufw.yaml
Normal file
@ -0,0 +1,21 @@
|
||||
ufw_enabled: true
|
||||
|
||||
ufw_rules:
|
||||
- name: "SSH from Local Subnet"
|
||||
port: "22"
|
||||
protocol: "tcp"
|
||||
action: "allow"
|
||||
source: "10.192.210.0/24"
|
||||
destination: "10.192.210.169"
|
||||
- name: "HTTP from Local Subnet"
|
||||
port: "80"
|
||||
protocol: "tcp"
|
||||
action: "allow"
|
||||
source: "10.192.210.0/24"
|
||||
destination: "10.192.210.169"
|
||||
- name: "HTTPS from Local Subnet"
|
||||
port: "443"
|
||||
protocol: "tcp"
|
||||
action: "allow"
|
||||
source: "10.192.210.0/24"
|
||||
destination: "10.192.210.169"
|
@ -0,0 +1 @@
|
||||
acme_certificate_account_email: acme.kube00@balsillie.email
|
@ -0,0 +1,9 @@
|
||||
ansible_connection: ssh
|
||||
ansible_host: kube00.balsillie.house
|
||||
ansible_fqdn: kube00.balsillie.house
|
||||
ansible_remote_addr: 10.192.110.110
|
||||
ansible_port: 22
|
||||
ansible_user: ladmin
|
||||
ansible_become_user: root
|
||||
ansible_become_method: sudo
|
||||
static_fqdn: hv00.balsillie.house
|
@ -0,0 +1,18 @@
|
||||
nut_client_local_server: false
|
||||
nut_client_shutdown_cmd: /scripts/shutdown.sh
|
||||
nut_client_shutdown_exit: "false"
|
||||
nut_client_hostsync: 15
|
||||
nut_client_notify_cmd: /scripts/notify.sh
|
||||
nut_client_min_supplies: 1
|
||||
nut_client_ups_devices:
|
||||
- name: ups0
|
||||
host: hv00.balsillie.house
|
||||
type: secondary
|
||||
port: 3493
|
||||
powervalue: 1
|
||||
nut_client_notify_messages:
|
||||
- name: SHUTDOWN
|
||||
message: "UPSMON shutdown triggered for KUBE00."
|
||||
nut_client_notify_flags:
|
||||
- name: SHUTDOWN
|
||||
flags: SYSLOG+WALL+EXEC
|
1
ansible/inventory/host_vars/localhost/synapse.yaml
Normal file
1
ansible/inventory/host_vars/localhost/synapse.yaml
Normal file
@ -0,0 +1 @@
|
||||
synapse_host_address: matrix.balsillie.net
|
@ -0,0 +1,4 @@
|
||||
ansible_connection: local
|
||||
ansible_user: ladmin
|
||||
ansible_become_user: root
|
||||
ansible_become_method: sudo
|
11
ansible/inventory/host_vars/nuc.balsillie.house/certbot.yaml
Normal file
11
ansible/inventory/host_vars/nuc.balsillie.house/certbot.yaml
Normal file
@ -0,0 +1,11 @@
|
||||
certbot_rfc2136_server: '10.208.240.1'
|
||||
certbot_rfc2136_key_name: 'rndc-house'
|
||||
certbot_rfc2136_key_algorithm: 'hmac-sha256'
|
||||
|
||||
certbot_webserver_type: 'nginx' # 'nginx' or 'apache'
|
||||
certbot_dns_plugin: 'rfc2136'
|
||||
certbot_email: "certbot.kodi00@balsillie.email"
|
||||
certbot_acme_server: "acme-v02.api.letsencrypt.org"
|
||||
|
||||
certbot_domains:
|
||||
- xmr.balsillie.house
|
@ -1,26 +0,0 @@
|
||||
all:
|
||||
children:
|
||||
hosts:
|
||||
server:
|
||||
ansible_host: server.balsillie.net
|
||||
ansible_os_family: Arch
|
||||
lab:
|
||||
ansible_host: lab.balsillie.net
|
||||
ansible_os_family: Arch
|
||||
nodes:
|
||||
node1:
|
||||
ansible_host: node1.balsillie.net
|
||||
node2:
|
||||
ansible_host: node2.balsillie.net
|
||||
node3:
|
||||
ansible_host: node3.balsillie.net
|
||||
guests:
|
||||
router:
|
||||
ansible_host: router.balsillie.net
|
||||
workstations:
|
||||
lat5420:
|
||||
ansible_host: lat5420.balsillie.net
|
||||
sff:
|
||||
ansible_host: sff.balsillie.net
|
||||
bridie:
|
||||
ansible_host: bridie.balsillie.net
|
80
ansible/inventory/inventory.yaml
Normal file
80
ansible/inventory/inventory.yaml
Normal file
@ -0,0 +1,80 @@
|
||||
all:
|
||||
children:
|
||||
aur_repo_hosts:
|
||||
hosts:
|
||||
dev.balsillie.house:
|
||||
firewalls:
|
||||
children:
|
||||
opnsense:
|
||||
hosts:
|
||||
router.balsillie.house:
|
||||
switches:
|
||||
hosts:
|
||||
sw00.balsillie.house:
|
||||
waps:
|
||||
hosts:
|
||||
wap00.balsillie.house:
|
||||
virtual_machines:
|
||||
hosts:
|
||||
fw00.balsillie.house:
|
||||
win11.balsillie.house:
|
||||
bare_metal:
|
||||
hosts:
|
||||
sw00.balsillie.house:
|
||||
wap00.balsillie.house:
|
||||
hv00.balsillie.house:
|
||||
kube00.balsillie.house:
|
||||
lat5420.balsillie.house:
|
||||
lat7490.balsillie.house:
|
||||
nuc.balsillie.house:
|
||||
servers:
|
||||
children:
|
||||
hypervisors:
|
||||
hosts:
|
||||
hv00.balsillie.house: # vp2420
|
||||
k8s:
|
||||
children:
|
||||
k8s_control:
|
||||
hosts:
|
||||
kube00.balsillie.house:
|
||||
k8s_worker:
|
||||
hosts:
|
||||
kube00.balsillie.net:
|
||||
k8s_storage:
|
||||
hosts:
|
||||
kube00.balsillie.net:
|
||||
nut_servers:
|
||||
hosts:
|
||||
hv00.balsillie.house:
|
||||
nut_clients:
|
||||
hosts:
|
||||
hv00.balsillie.house:
|
||||
kube00.balsillie.house:
|
||||
nas:
|
||||
hosts:
|
||||
nas.balsillie.house:
|
||||
workstations:
|
||||
children:
|
||||
arch:
|
||||
hosts:
|
||||
lat5420.balsillie.house:
|
||||
sff.balsillie.house:
|
||||
kodi00.balsillie.house:
|
||||
nuc.balsillie.house:
|
||||
windows:
|
||||
hosts:
|
||||
lat7490.balsillie.house:
|
||||
win11.balsillie.house:
|
||||
laptops:
|
||||
hosts:
|
||||
lat5420.balsillie.house:
|
||||
lat7490.balsillie.house:
|
||||
desktops:
|
||||
hosts:
|
||||
sff.balsillie.house:
|
||||
mp00.balsillie.house:
|
||||
kodi00.balsillie.house:
|
||||
nuc.balsillie.house:
|
||||
kodi:
|
||||
hosts:
|
||||
kodi00.balsillie.house:
|
10
ansible/playbooks/home.yml
Normal file
10
ansible/playbooks/home.yml
Normal file
@ -0,0 +1,10 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: AUR Repo
|
||||
hosts: aur_repo_hosts
|
||||
become: true
|
||||
gather_facts: true
|
||||
roles:
|
||||
# - certbot
|
||||
- nginx
|
||||
# - aur_repo_host
|
10
ansible/playbooks/infra/hv00.yaml
Normal file
10
ansible/playbooks/infra/hv00.yaml
Normal file
@ -0,0 +1,10 @@
|
||||
---
|
||||
|
||||
- name: Setup core home router
|
||||
hosts:
|
||||
- hv00.balsillie.house
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
# - role: aur_repo_host
|
||||
- role: nginx
|
15
ansible/playbooks/infra/kodi.yaml
Normal file
15
ansible/playbooks/infra/kodi.yaml
Normal file
@ -0,0 +1,15 @@
|
||||
---
|
||||
|
||||
- name: Setup Kodi boxes
|
||||
hosts:
|
||||
- kodi00.balsillie.house
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
# - role: sshd
|
||||
# - role: ufw
|
||||
# - role: nginx
|
||||
# - role: aur_repo_client
|
||||
# - role: arr
|
||||
- role: torrent
|
||||
# - role: sonarr
|
9
ansible/playbooks/infra/nuc.yaml
Normal file
9
ansible/playbooks/infra/nuc.yaml
Normal file
@ -0,0 +1,9 @@
|
||||
---
|
||||
|
||||
- name: Setup NUC
|
||||
hosts:
|
||||
- nuc.balsillie.house
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- role: certbot
|
32
ansible/playbooks/infra/nut.yaml
Normal file
32
ansible/playbooks/infra/nut.yaml
Normal file
@ -0,0 +1,32 @@
|
||||
- name: Install NUT
|
||||
hosts:
|
||||
- nut_servers
|
||||
- nut_clients
|
||||
become: true
|
||||
gather_facts: true
|
||||
tasks:
|
||||
|
||||
- name: Install NUT package on Archlinux
|
||||
when: ansible_facts['os_family'] == "Archlinux"
|
||||
community.general.pacman:
|
||||
name: nut
|
||||
state: latest
|
||||
update_cache: true
|
||||
|
||||
- name: Setup NUT servers
|
||||
gather_facts: false
|
||||
hosts: nut_servers
|
||||
become: true
|
||||
roles:
|
||||
- role: acme_certificate
|
||||
acme_certificate_subject: "{{ ansible_host }}"
|
||||
acme_certificate_zone: balsillie.house
|
||||
acme_certificate_restart_services: ['nut-server.service']
|
||||
- role: nut_server
|
||||
|
||||
- name: Setup NUT clients
|
||||
gather_facts: false
|
||||
hosts: nut_clients
|
||||
become: true
|
||||
roles:
|
||||
- nut_client
|
1
ansible/playbooks/infra/roles
Symbolic link
1
ansible/playbooks/infra/roles
Symbolic link
@ -0,0 +1 @@
|
||||
../../roles/
|
6
ansible/playbooks/k8s/00_all_of_the_things.yaml
Normal file
6
ansible/playbooks/k8s/00_all_of_the_things.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
- ansible.builtin.import_playbook: ./01_baremetal.yaml
|
||||
- ansible.builtin.import_playbook: ./02_hypervisor.yaml
|
||||
- ansible.builtin.import_playbook: ./03_vm_template.yaml
|
||||
- ansible.builtin.import_playbook: ./04_vm_deploy.yaml
|
||||
- ansible.builtin.import_playbook: ./05_k8s_deploy.yaml
|
7
ansible/playbooks/k8s/01_baremetal.yaml
Normal file
7
ansible/playbooks/k8s/01_baremetal.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
---
|
||||
- name: bare metal os installation
|
||||
gather_facts: false
|
||||
hosts: hv00
|
||||
become: true
|
||||
roles:
|
||||
- baremetal
|
15
ansible/playbooks/k8s/02_hypervisor.yaml
Normal file
15
ansible/playbooks/k8s/02_hypervisor.yaml
Normal file
@ -0,0 +1,15 @@
|
||||
---
|
||||
- name: configure hyervisor host
|
||||
gather_facts: false
|
||||
hosts: hv00
|
||||
become: true
|
||||
|
||||
roles:
|
||||
- python-install
|
||||
- sshd
|
||||
- firewall
|
||||
- pikaur
|
||||
- msmtp
|
||||
- zfs-install
|
||||
- libvirt-server
|
||||
|
7
ansible/playbooks/k8s/03_vm_template.yml
Normal file
7
ansible/playbooks/k8s/03_vm_template.yml
Normal file
@ -0,0 +1,7 @@
|
||||
---
|
||||
- name: create vm template
|
||||
hosts: hv00
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- vm_template
|
22
ansible/playbooks/k8s/04_vm_deploy.yaml
Normal file
22
ansible/playbooks/k8s/04_vm_deploy.yaml
Normal file
@ -0,0 +1,22 @@
|
||||
---
|
||||
- name: create the vms
|
||||
hosts: hv00
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- vm_deploy
|
||||
|
||||
# - name: vm hardening
|
||||
# hosts: k8s
|
||||
# gather_facts: true
|
||||
# become: true
|
||||
# roles:
|
||||
# - sshd
|
||||
# - firewall
|
||||
|
||||
- name: configure vm disks
|
||||
hosts: k8s
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- vm_disks
|
66
ansible/playbooks/k8s/05_k8s_deploy.yaml
Normal file
66
ansible/playbooks/k8s/05_k8s_deploy.yaml
Normal file
@ -0,0 +1,66 @@
|
||||
---
|
||||
# - name: configure control plane
|
||||
# hosts: k8s_control
|
||||
# gather_facts: true
|
||||
# become: true
|
||||
# roles:
|
||||
# - k8s_control
|
||||
|
||||
# - name: configure calico networking
|
||||
# hosts: localhost
|
||||
# gather_facts: true
|
||||
# become: false
|
||||
# roles:
|
||||
# - k8s_network
|
||||
|
||||
# - name: remove control-plane taints
|
||||
# hosts: localhost
|
||||
# gather_facts: false
|
||||
# become: false
|
||||
# roles:
|
||||
# - k8s_taint
|
||||
|
||||
# - name: configure zfs storage on nodes
|
||||
# hosts: k8s_storage
|
||||
# gather_facts: true
|
||||
# become: true
|
||||
# roles:
|
||||
# - zfs_repo_install
|
||||
|
||||
# - name: configure open-ebs storage operator
|
||||
# hosts: localhost
|
||||
# gather_facts: false
|
||||
# become: false
|
||||
# roles:
|
||||
# - k8s_storage_ebs_local_deploy
|
||||
|
||||
- name: configure open-ebs zfs driver
|
||||
hosts: localhost
|
||||
gather_facts: false
|
||||
become: false
|
||||
roles:
|
||||
- k8s_storage_ebs_zfs_deploy
|
||||
|
||||
# - name: configure smb storage provider
|
||||
# hosts: localhost
|
||||
# gather_facts: false
|
||||
# become: false
|
||||
# roles:
|
||||
# - k8s_storage_smb_deploy
|
||||
|
||||
# - name: configure ingress controller
|
||||
# hosts: localhost
|
||||
# gather_facts: false
|
||||
# become: false
|
||||
# roles:
|
||||
# - k8s_ingress_controller
|
||||
|
||||
|
||||
# - name: configure cert manager
|
||||
# hosts: localhost
|
||||
# gather_facts: false
|
||||
# become: false
|
||||
# roles:
|
||||
# - k8s_cert_manager
|
||||
|
||||
|
7
ansible/playbooks/k8s/96_ebs_destroy.yaml
Normal file
7
ansible/playbooks/k8s/96_ebs_destroy.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
---
|
||||
- name: destroy ebs storage
|
||||
hosts: k8s_worker
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- k8s_ebs_destroy
|
7
ansible/playbooks/k8s/96_rook_destroy.yaml
Normal file
7
ansible/playbooks/k8s/96_rook_destroy.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
---
|
||||
- name: destroy rook storage
|
||||
hosts: k8s_worker
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- k8s_rook_destroy
|
7
ansible/playbooks/k8s/97_k8s_destroy.yaml
Normal file
7
ansible/playbooks/k8s/97_k8s_destroy.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
---
|
||||
- name: destroy the k8s cluster
|
||||
hosts: k8s
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- k8s_destroy
|
7
ansible/playbooks/k8s/98_vm_destroy.yaml
Normal file
7
ansible/playbooks/k8s/98_vm_destroy.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
---
|
||||
- name: destroy the vms
|
||||
hosts: hv00
|
||||
gather_facts: true
|
||||
become: true
|
||||
roles:
|
||||
- vm_destroy
|
3
ansible/playbooks/k8s/99_burn_it_all.yaml
Normal file
3
ansible/playbooks/k8s/99_burn_it_all.yaml
Normal file
@ -0,0 +1,3 @@
|
||||
---
|
||||
- ansible.builtin.import_playbook: ./97_k8s_destroy.yaml
|
||||
- ansible.builtin.import_playbook: ./98_vm_destroy.yaml
|
1
ansible/playbooks/k8s/roles
Symbolic link
1
ansible/playbooks/k8s/roles
Symbolic link
@ -0,0 +1 @@
|
||||
../../roles/
|
1
ansible/playbooks/roles
Symbolic link
1
ansible/playbooks/roles
Symbolic link
@ -0,0 +1 @@
|
||||
../roles
|
44
ansible/playbooks/synapse_delete_empty_rooms.yaml
Normal file
44
ansible/playbooks/synapse_delete_empty_rooms.yaml
Normal file
@ -0,0 +1,44 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: Clean Synapse
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: false
|
||||
gather_facts: false
|
||||
tasks:
|
||||
|
||||
- name: Get room list
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v1/rooms?limit=1000"
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
register: room_list
|
||||
|
||||
- name: Set empty_rooms fact
|
||||
ansible.builtin.set_fact:
|
||||
empty_rooms: "{{ room_list.json.rooms | selectattr('joined_local_members', '==', 0) | list }}"
|
||||
|
||||
- name: Debug empty room count
|
||||
ansible.builtin.debug:
|
||||
msg: "Total empty rooms to delete: {{ empty_rooms | length }}"
|
||||
|
||||
- name: Delete empty rooms
|
||||
when: empty_rooms | length > 0
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v2/rooms/{{ room.room_id }}"
|
||||
method: DELETE
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
body_format: json
|
||||
body: {}
|
||||
loop: "{{ empty_rooms }}"
|
||||
loop_control:
|
||||
loop_var: room
|
||||
label: "{{ room.room_id }}"
|
||||
register: purge_ids
|
||||
|
||||
- name: Write purge_ids to file
|
||||
ansible.builtin.copy:
|
||||
dest: "{{ playbook_dir }}/purge_ids_{{ now(utc=false, fmt='%Y-%m-%d_%H-%M-%S') }}.json"
|
||||
content: "{{ purge_ids.results | map(attribute='json.delete_id') | list | to_nice_json }}"
|
||||
mode: "0664"
|
28
ansible/playbooks/synapse_delete_room.yaml
Normal file
28
ansible/playbooks/synapse_delete_room.yaml
Normal file
@ -0,0 +1,28 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: Clean Synapse
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: false
|
||||
gather_facts: false
|
||||
vars_prompt:
|
||||
- name: room_id
|
||||
prompt: "Enter the room ID to delete"
|
||||
private: false
|
||||
tasks:
|
||||
|
||||
- name: Delete room
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v2/rooms/{{ room_id }}"
|
||||
method: DELETE
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
body_format: json
|
||||
body: {}
|
||||
register: purge_id
|
||||
|
||||
- name: Wait for purge to complete
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v2/rooms/delete_status/{{ purge_id.json.delete_id }}"
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
19
ansible/playbooks/synapse_get_all_rooms.yaml
Normal file
19
ansible/playbooks/synapse_get_all_rooms.yaml
Normal file
@ -0,0 +1,19 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: Get all Synapse rooms
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: false
|
||||
gather_facts: false
|
||||
tasks:
|
||||
|
||||
- name: Get room details
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v1/rooms?limit=1000"
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
register: result
|
||||
|
||||
- name: Print result
|
||||
ansible.builtin.debug:
|
||||
var: result.json.rooms | map(attribute='room_id') | list
|
19
ansible/playbooks/synapse_get_large_rooms.yaml
Normal file
19
ansible/playbooks/synapse_get_large_rooms.yaml
Normal file
@ -0,0 +1,19 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: Get large Synapse rooms
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: false
|
||||
gather_facts: false
|
||||
tasks:
|
||||
|
||||
- name: Get large rooms
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v1/statistics/database/rooms"
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
register: result
|
||||
|
||||
- name: Print result
|
||||
ansible.builtin.debug:
|
||||
var: result.json
|
44
ansible/playbooks/synapse_get_purge_status.yaml
Normal file
44
ansible/playbooks/synapse_get_purge_status.yaml
Normal file
@ -0,0 +1,44 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: Get Synapse purge status
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: false
|
||||
gather_facts: false
|
||||
vars_prompt:
|
||||
- name: "purge_ids_file"
|
||||
prompt: "Enter the file name containing the purge ids"
|
||||
private: false
|
||||
tasks:
|
||||
|
||||
- name: Load purge ids
|
||||
ansible.builtin.slurp:
|
||||
src: "{{ playbook_dir }}/{{ purge_ids_file }}"
|
||||
register: purge_ids
|
||||
|
||||
- name: Set purge_ids_list fact
|
||||
ansible.builtin.set_fact:
|
||||
purge_ids_list: "{{ purge_ids.content | b64decode | from_json }}"
|
||||
|
||||
- name: Get purge status
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v2/rooms/delete_status/{{ item }}"
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
loop: "{{ purge_ids_list }}"
|
||||
register: purge_status
|
||||
|
||||
- name: Set purge_status_totals
|
||||
ansible.builtin.set_fact:
|
||||
purge_status_shutting_down: "{{ purge_status.results | selectattr('json.status', '==', 'shutting_down') | list | length }}"
|
||||
purge_status_purging: "{{ purge_status.results | selectattr('json.status', '==', 'purging') | list | length }}"
|
||||
purge_status_complete: "{{ purge_status.results | selectattr('json.status', '==', 'complete') | list | length }}"
|
||||
purge_status_failed: "{{ purge_status.results | selectattr('json.status', '==', 'failed') | list | length }}"
|
||||
|
||||
- name: Print status
|
||||
ansible.builtin.debug:
|
||||
msg: |
|
||||
Shutting down: {{ purge_status_shutting_down }}
|
||||
Purging: {{ purge_status_purging }}
|
||||
Complete: {{ purge_status_complete }}
|
||||
Failed: {{ purge_status_failed }}
|
23
ansible/playbooks/synapse_get_room_details.yaml
Normal file
23
ansible/playbooks/synapse_get_room_details.yaml
Normal file
@ -0,0 +1,23 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: Get Synapse room details
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: false
|
||||
gather_facts: false
|
||||
vars_prompt:
|
||||
- name: room_id
|
||||
prompt: "Enter the room ID to fetch"
|
||||
private: false
|
||||
tasks:
|
||||
|
||||
- name: Get room details
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v1/rooms/{{ room_id }}"
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
register: result
|
||||
|
||||
- name: Print result
|
||||
ansible.builtin.debug:
|
||||
var: result.json
|
23
ansible/playbooks/synapse_get_room_members.yaml
Normal file
23
ansible/playbooks/synapse_get_room_members.yaml
Normal file
@ -0,0 +1,23 @@
|
||||
# code: language=ansible
|
||||
|
||||
- name: Room members
|
||||
hosts: localhost
|
||||
connection: local
|
||||
become: false
|
||||
gather_facts: false
|
||||
vars_prompt:
|
||||
- name: room_id
|
||||
prompt: "Enter the room ID to fetch"
|
||||
private: false
|
||||
tasks:
|
||||
|
||||
- name: Get room details
|
||||
ansible.builtin.uri:
|
||||
url: "https://{{ synapse_host_address }}/_synapse/admin/v1/rooms/{{ room_id }}/members"
|
||||
headers:
|
||||
Authorization: "Bearer {{ synapse_admin_token }}"
|
||||
register: result
|
||||
|
||||
- name: Print result
|
||||
ansible.builtin.debug:
|
||||
var: result.json
|
17
ansible/playbooks/truenas.yml
Normal file
17
ansible/playbooks/truenas.yml
Normal file
@ -0,0 +1,17 @@
|
||||
---
|
||||
|
||||
- name: Configure Truenas
|
||||
hosts: truenas
|
||||
become: false
|
||||
tasks:
|
||||
- name: Install required packages
|
||||
package:
|
||||
name: "{{ item }}"
|
||||
state: present
|
||||
with_items:
|
||||
- py37-ansible
|
||||
- py37-pip
|
||||
- py37-netifaces
|
||||
- py37-netaddr
|
||||
- py37-requests
|
||||
- py37-yaml
|
218
ansible/roles/acme_certificate/tasks/main.yaml
Normal file
218
ansible/roles/acme_certificate/tasks/main.yaml
Normal file
@ -0,0 +1,218 @@
|
||||
---
|
||||
|
||||
- name: Install required python libraries system wide
|
||||
when: ansible_facts['os_family'] == "Archlinux"
|
||||
community.general.pacman:
|
||||
name:
|
||||
- python-cryptography
|
||||
- python-dnspython
|
||||
state: latest
|
||||
update_cache: true
|
||||
|
||||
- name: Set certificate path facts
|
||||
ansible.builtin.set_fact:
|
||||
acme_certificate_certificate_path: "/etc/ssl/private/{{ acme_certificate_subject }}.pem"
|
||||
acme_certificate_chain_path: "/etc/ssl/private/{{ acme_certificate_subject }}.chain.pem"
|
||||
acme_certificate_combined_path: "/etc/ssl/private/{{ acme_certificate_subject }}.combined.pem"
|
||||
acme_certificate_csr_path: "/etc/ssl/private/{{ acme_certificate_subject }}.csr"
|
||||
acme_certificate_fullchain_path: "/etc/ssl/private/{{ acme_certificate_subject }}.fullchain.pem"
|
||||
acme_certificate_key_path: "/etc/ssl/private/{{ acme_certificate_subject }}.key"
|
||||
acme_certificate_plain_combined_path: "/etc/ssl/private/{{ acme_certificate_subject }}.plain.combined.pem"
|
||||
acme_certificate_plain_key_path: "/etc/ssl/private/{{ acme_certificate_subject }}.plain.key"
|
||||
|
||||
- name: Create ACME account key directory
|
||||
ansible.builtin.file:
|
||||
group: root
|
||||
mode: '0700'
|
||||
owner: root
|
||||
path: /etc/ssl/private/ACME
|
||||
state: directory
|
||||
|
||||
- name: Create ACME account key
|
||||
community.crypto.openssl_privatekey:
|
||||
cipher: auto
|
||||
curve: secp384r1
|
||||
format: auto_ignore
|
||||
group: root
|
||||
mode: '0600'
|
||||
owner: root
|
||||
passphrase: "{{ acme_certificate_account_key_passphrase }}"
|
||||
path: /etc/ssl/private/ACME/account.key
|
||||
size: 4096
|
||||
state: present
|
||||
type: RSA
|
||||
|
||||
- name: Generate RSA private key
|
||||
community.crypto.openssl_privatekey:
|
||||
cipher: auto
|
||||
curve: secp384r1
|
||||
format: auto_ignore
|
||||
group: root
|
||||
mode: '0600'
|
||||
owner: root
|
||||
passphrase: "{{ ssl_passphrase }}"
|
||||
path: "{{ acme_certificate_key_path }}"
|
||||
size: 4096
|
||||
state: present
|
||||
type: RSA
|
||||
register: genrsa_private_key
|
||||
|
||||
- name: Generate CSR
|
||||
community.crypto.openssl_csr:
|
||||
common_name: "{{ acme_certificate_subject }}"
|
||||
country_name: "{{ acme_certificate_csr_country }}"
|
||||
digest: sha256
|
||||
email_address: "{{ acme_certificate_csr_email }}"
|
||||
group: root
|
||||
locality_name: "{{ acme_certificate_csr_locality }}"
|
||||
mode: '0600'
|
||||
organization_name: "{{ acme_certificate_csr_organization }}"
|
||||
owner: root
|
||||
path: "{{ acme_certificate_csr_path }}"
|
||||
privatekey_passphrase: "{{ ssl_passphrase }}"
|
||||
privatekey_path: "{{ acme_certificate_key_path }}"
|
||||
state: present
|
||||
state_or_province_name: "{{ acme_certificate_csr_state }}"
|
||||
use_common_name_for_san: true
|
||||
|
||||
- name: Submit ACME certificate request
|
||||
community.crypto.acme_certificate:
|
||||
account_email: "{{ acme_certificate_account_email }}"
|
||||
account_key_passphrase: "{{ acme_certificate_account_key_passphrase }}"
|
||||
account_key_src: /etc/ssl/private/ACME/account.key
|
||||
acme_directory: "{{ acme_certificate_directory }}"
|
||||
acme_version: 2
|
||||
chain_dest: "{{ acme_certificate_chain_path }}"
|
||||
challenge: dns-01
|
||||
csr: "{{ acme_certificate_csr_path }}"
|
||||
dest: "{{ acme_certificate_certificate_path }}"
|
||||
fullchain_dest: "{{ acme_certificate_fullchain_path }}"
|
||||
modify_account: true
|
||||
select_crypto_backend: cryptography
|
||||
terms_agreed: true
|
||||
validate_certs: true
|
||||
register: challenge
|
||||
|
||||
- name: Debug ACME certificate challenge
|
||||
ansible.builtin.debug:
|
||||
var: challenge
|
||||
|
||||
- name: Proceed if challenge is changed
|
||||
when:
|
||||
- challenge is changed
|
||||
- acme_certificate_subject in challenge.challenge_data
|
||||
block:
|
||||
|
||||
- name: Answer ACME certificate challenge
|
||||
community.general.nsupdate:
|
||||
key_algorithm: "{{ rfc2136_key_algorithm }}"
|
||||
key_name: "{{ rfc2136_key_name }}"
|
||||
key_secret: "{{ rfc2136_key_secret }}"
|
||||
port: 53
|
||||
protocol: tcp
|
||||
record: "{{ challenge.challenge_data[acme_certificate_subject]['dns-01'].record }}."
|
||||
server: "{{ rfc2136_server_address }}"
|
||||
state: present
|
||||
ttl: 3600
|
||||
type: TXT
|
||||
value: "{{ challenge.challenge_data[acme_certificate_subject]['dns-01'].resource_value }}"
|
||||
# zone: "{{ acme_certificate_zone }}"
|
||||
register: nsupdate_result
|
||||
|
||||
- name: Debug nsupdate result
|
||||
ansible.builtin.debug:
|
||||
var: nsupdate_result
|
||||
|
||||
- name: Retrieve ACME certificate
|
||||
community.crypto.acme_certificate:
|
||||
account_email: "{{ acme_certificate_account_email }}"
|
||||
account_key_passphrase: "{{ acme_certificate_account_key_passphrase }}"
|
||||
account_key_src: /etc/ssl/private/ACME/account.key
|
||||
acme_directory: "{{ acme_certificate_directory }}"
|
||||
acme_version: 2
|
||||
chain_dest: "{{ acme_certificate_chain_path }}"
|
||||
challenge: dns-01
|
||||
csr: "{{ acme_certificate_csr_path }}"
|
||||
data: "{{ challenge }}"
|
||||
dest: "{{ acme_certificate_certificate_path }}"
|
||||
fullchain_dest: "{{ acme_certificate_fullchain_path }}"
|
||||
select_crypto_backend: cryptography
|
||||
terms_agreed: true
|
||||
validate_certs: true
|
||||
|
||||
- name: Cleanup ACME challenge
|
||||
community.general.nsupdate:
|
||||
key_algorithm: "{{ rfc2136_key_algorithm }}"
|
||||
key_name: "{{ rfc2136_key_name }}"
|
||||
key_secret: "{{ rfc2136_key_secret }}"
|
||||
port: 53
|
||||
protocol: tcp
|
||||
record: "{{ challenge.challenge_data[acme_certificate_subject]['dns-01'].record }}."
|
||||
server: "{{ rfc2136_server_address }}"
|
||||
state: absent
|
||||
ttl: 3600
|
||||
type: TXT
|
||||
value: "{{ challenge.challenge_data[acme_certificate_subject]['dns-01'].resource_value }}"
|
||||
zone: "{{ acme_certificate_zone }}"
|
||||
|
||||
- name: Slurp fullchain contents
|
||||
ansible.builtin.slurp:
|
||||
src: "{{ acme_certificate_fullchain_path }}"
|
||||
register: acme_certificate_fullchain_content
|
||||
|
||||
- name: Slurp private key contents
|
||||
ansible.builtin.slurp:
|
||||
src: "{{ acme_certificate_key_path }}"
|
||||
register: acme_certificate_key_content
|
||||
|
||||
- name: Create combined cert file
|
||||
ansible.builtin.template:
|
||||
dest: "{{ acme_certificate_combined_path }}"
|
||||
group: root
|
||||
mode: '0600'
|
||||
owner: root
|
||||
src: combined.pem.j2
|
||||
|
||||
- name: Check if plain key file exists
|
||||
ansible.builtin.stat:
|
||||
path: "{{ acme_certificate_plain_key_path }}"
|
||||
register: plain_key_file
|
||||
|
||||
- name: Create a plain text copy of the SSL private key # noqa: no-handler
|
||||
when: |
|
||||
genrsa_private_key.changed or
|
||||
not plain_key_file.stat.exists
|
||||
ansible.builtin.command:
|
||||
cmd: openssl rsa -in {{ acme_certificate_key_path }} -passin pass:{{ ssl_passphrase }} -out {{ acme_certificate_plain_key_path }}
|
||||
changed_when: true
|
||||
|
||||
- name: Slurp plain text private key contents
|
||||
ansible.builtin.slurp:
|
||||
src: "{{ acme_certificate_plain_key_path }}"
|
||||
register: acme_certificate_key_content
|
||||
|
||||
- name: Create plain text combined cert file
|
||||
ansible.builtin.template:
|
||||
dest: "{{ acme_certificate_plain_combined_path }}"
|
||||
group: root
|
||||
mode: '0600'
|
||||
owner: root
|
||||
src: combined.pem.j2
|
||||
|
||||
- name: Dependant services block
|
||||
when:
|
||||
- (acme_certificate_restart_services | default([]) | length) >= 1
|
||||
- challenge is changed
|
||||
block:
|
||||
|
||||
- name: Check state of running services
|
||||
ansible.builtin.service_facts:
|
||||
|
||||
- name: Restart dependant services
|
||||
when:
|
||||
- ansible_facts.services[item] is defined
|
||||
- ansible_facts.services[item].state in ['running','failed']
|
||||
ansible.builtin.service:
|
||||
name: "{{ item }}"
|
||||
state: restarted
|
||||
loop: "{{ acme_certificate_restart_services }}"
|
2
ansible/roles/acme_certificate/templates/combined.pem.j2
Normal file
2
ansible/roles/acme_certificate/templates/combined.pem.j2
Normal file
@ -0,0 +1,2 @@
|
||||
{{ acme_certificate_fullchain_content['content'] | b64decode }}
|
||||
{{ acme_certificate_key_content['content'] | b64decode }}
|
49
ansible/roles/archinstall/defaults/main.yml
Normal file
49
ansible/roles/archinstall/defaults/main.yml
Normal file
@ -0,0 +1,49 @@
|
||||
---
|
||||
iso_source:
|
||||
ntp_servers:
|
||||
- time.example.com
|
||||
|
||||
pacstrap:
|
||||
server: # Select from https://geo.mirror.pkgbuild.com/iso/latest/arch/pkglist.x86_64.txt
|
||||
base
|
||||
linux-lts
|
||||
linux-firmware
|
||||
intel-ucode
|
||||
e2fsprogs
|
||||
dosfstools
|
||||
exfatprogs
|
||||
nftables
|
||||
openssh
|
||||
ufw
|
||||
nano
|
||||
man-db
|
||||
man-pages
|
||||
texinfo
|
||||
curl
|
||||
which
|
||||
usbutils
|
||||
tzdata
|
||||
tpm2-tss
|
||||
tar
|
||||
sudo
|
||||
smartmontools
|
||||
shadow
|
||||
sed
|
||||
screen
|
||||
reflector
|
||||
pv
|
||||
pinentry
|
||||
pciutils
|
||||
parted
|
||||
openssl
|
||||
nbd
|
||||
kmod
|
||||
bash
|
||||
bind
|
||||
ca-certificates
|
||||
ca-certificates-mozilla
|
||||
ca-certificates-utils
|
||||
efibootmgr
|
||||
grep
|
||||
mdadm
|
||||
lvm2
|
3
ansible/roles/archinstall/files/packages
Normal file
3
ansible/roles/archinstall/files/packages
Normal file
@ -0,0 +1,3 @@
|
||||
linux-lts
|
||||
linux-lts-headers
|
||||
base
|
136
ansible/roles/archinstall/tasks/main.yml
Normal file
136
ansible/roles/archinstall/tasks/main.yml
Normal file
@ -0,0 +1,136 @@
|
||||
---
|
||||
- name: attach installation iso as virtual media
|
||||
|
||||
- name: boot from installation iso
|
||||
|
||||
- name: detect booted ip address
|
||||
|
||||
- name: configure disks
|
||||
# Specify root disk and part, set to type 23 (linux root x86-64), label root
|
||||
# Specify efi disk and part, set to type 1 (efi system), label efi
|
||||
# format efi partition
|
||||
# mkfs.fat -F32 /dev/mmcblk0p1
|
||||
# Encrypt root partition
|
||||
# cryptsetup -y -v luksFormat /dev/sda1 # TODO add keyfile/password automatically
|
||||
# cryptsetup open /dev/sda1 root
|
||||
# mkfs.ext4 /dev/mapper/root
|
||||
# mkdir /mnt/root
|
||||
# mount /dev/mapper/root /mnt/root
|
||||
# mkdir /mnt/root/efi
|
||||
# mount /dev/mmcblk0p1 /mnt/root/efi
|
||||
|
||||
# Add cryptsetup params to kernel cmdline
|
||||
# cryptdevice=UUID=device-UUID:root root=/dev/mapper/root rw
|
||||
|
||||
# add efi to /etc/fstab
|
||||
# mkdir /mnt/mountpoint/etc
|
||||
# sudo genfstab -L /mnt/mountpoint >> /mnt/mountpoint/etc/fstab
|
||||
|
||||
|
||||
|
||||
- name: sync ntp
|
||||
# timedatectl set-timezone Australia/Brisbane
|
||||
# timedatectl set-ntp true
|
||||
|
||||
# run reflector to get a list of mirrors
|
||||
# reflector -c AU --save /etc/pacman.d/mirrorlist
|
||||
|
||||
# update dbs
|
||||
# pacman -Sy
|
||||
|
||||
|
||||
# pacstrap
|
||||
# pacstrap -K /mnt/root base linux-lts linux-firmware nano openssh bind bash efibootmgr reflector screen pv pinentry sudo man-db man-pages texinfo ufw nftables intel-ucode e2fsprogs dosfstools curl cryptsetup sbctl sbsigntools fwupd fwupd-efi dmidecode udisks2 usbutils inetutils ethtool qemu-guest-agent arch-install-scripts lsof
|
||||
# desktop
|
||||
# pacstrap -K /mnt base linux linux-firmware nano openssh bind bash efibootmgr reflector screen pv pinentry sudo man-db man-pages texinfo ufw nftables intel-ucode e2fsprogs dosfstools curl cryptsetup sbctl sbsigntools fwupd fwupd-efi dmidecode udisks2 usbutils inetutils ethtool arch-install-scripts lsof btrfs-progs plasma-meta plasma-wayland-session kde-system dolphin-plugins
|
||||
|
||||
# gen fstab
|
||||
# genfstab -L /mnt/root >> /mnt/root/etc/fstab
|
||||
|
||||
#
|
||||
# chroot from here
|
||||
#
|
||||
|
||||
# set hostname
|
||||
# echo hv00 > /etc/hostname
|
||||
|
||||
# TODO add entries to /etc/hosts
|
||||
# 127.0.0.1 localhost
|
||||
# ::1 localhost
|
||||
# 127.0.1.1 static_fqdn
|
||||
|
||||
# link timezone
|
||||
# ln -sf /usr/share/zoneinfo/Australia/Brisbane /etc/localtime
|
||||
|
||||
# enable ntp again
|
||||
# timedatectl set-ntp true # TODO move this post reboot
|
||||
|
||||
# sync hardware clock
|
||||
# hwclock --systohc
|
||||
|
||||
# set locale
|
||||
# sed -i 's/#en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/g' /etc/locale.gen
|
||||
# locale-gen
|
||||
# echo LANG=en_US.UTF-8 > /etc/locale.conf
|
||||
|
||||
# uncomment wheel group in /etc/sudoers
|
||||
# sed -i 's/# %wheel ALL=(ALL:ALL) ALL/%wheel ALL=(ALL:ALL) ALL/g' /etc/sudoers
|
||||
|
||||
# add user
|
||||
# useradd -u 1000 -U -m -b /home/ -G wheel -s /bin/bash ladmin
|
||||
# set new user password
|
||||
|
||||
# disable root user
|
||||
# passwd -l root
|
||||
# usermod -s /sbin/nologin root
|
||||
|
||||
# create /etc/kernel/cmdline file
|
||||
# the uuids are the DISK uuids from /dev/disk/by-uuid, NOT the partuuids
|
||||
# echo 'cryptdevice=dbbb9fb2-5509-4701-a2bb-5660934a5378:root root=/dev/mapper/root rw' > /etc/kernel/cmdline
|
||||
# for sd-encrypt hook
|
||||
# echo 'rd.luks.name=dbbb9fb2-5509-4701-a2bb-5660934a5378=root root=/dev/mapper/root rw' > /etc/kernel/cmdline
|
||||
|
||||
|
||||
# create a default systemd-networkd config
|
||||
# enable systemd-networkd
|
||||
# enable sshd
|
||||
# enable ufw service
|
||||
# enable ufw firewall
|
||||
# create ufw config to allow ssh port 22
|
||||
|
||||
# modify mkinitcpio presets
|
||||
# template file?
|
||||
# output to default efi path ESP/efi/boot/bootx64.efi
|
||||
|
||||
# modify mkinitcpio.conf for encryption
|
||||
# old HOOKS=(base udev autodetect modconf kms keyboard keymap consolefont block filesystems fsck)
|
||||
# new HOOKS=(base systemd keyboard autodetect modconf kms block sd-encrypt filesystems fsck)
|
||||
# sed -i 's/^HOOKS=(base udev autodetect modconf block filesystems keyboard fsck)/HOOKS=(base udev autodetect modconf block encrypt filesystems keyboard fsck)/g' /etc/mkinitcpio.conf
|
||||
|
||||
# generate sb keys with sbctl
|
||||
# keys go to /usr/share/secureboot/keys/db/db.pem
|
||||
# enroll sbctl keys
|
||||
|
||||
# add console= option to cmdline file
|
||||
|
||||
# create initcpio post hook /etc/initcpio/post/uki-sbsign
|
||||
# make /etc/initcpio/post/uki-sbsign executable
|
||||
# chmod +x /etc/initcpio/post/uki-sbsign
|
||||
# make initcpio
|
||||
# mkinitcpio -p linux-lts
|
||||
|
||||
# vfio and iommu
|
||||
# add 'intel_iommu=on iommu=pt' to kernel cmdline
|
||||
|
||||
# add vfio binding
|
||||
# vp2420 iGPU = 8086:4555
|
||||
# add vfio-pci ids to /etc/kernel/cmdline
|
||||
# vfio-pci.ids=8086:4555
|
||||
|
||||
# add vfio modules to mkinitcpio.conf
|
||||
# MODULES=(vfio_pci vfio vfio_iommu_type1)
|
||||
# ensure modconf hook is in mkinitcpio.conf
|
||||
# HOOKS=(base systemd keyboard autodetect modconf kms block sd-encrypt filesystems fsck)
|
||||
|
||||
# efibootmgr NO BACKSLASH ON A ROOT FILE
|
||||
# efibootmgr -c -d /dev/nvme0n1 -p 1 -L "Arch Linux" -l "archlinux.efi"
|
9
ansible/roles/archinstall/templates/eno.network.j2
Normal file
9
ansible/roles/archinstall/templates/eno.network.j2
Normal file
@ -0,0 +1,9 @@
|
||||
[Match]
|
||||
MACAddress={{ mac_address }}
|
||||
|
||||
[Link]
|
||||
ARP=no
|
||||
|
||||
[Network]
|
||||
DHCP=no
|
||||
Bond=lacp
|
24
ansible/roles/arr/tasks/main.yaml
Normal file
24
ansible/roles/arr/tasks/main.yaml
Normal file
@ -0,0 +1,24 @@
|
||||
---
|
||||
|
||||
- name: Install arr packages
|
||||
when: ansible_facts['os_family'] == "Archlinux"
|
||||
community.general.pacman:
|
||||
name: "{{ arr_packages }}"
|
||||
state: present
|
||||
update_cache: true
|
||||
|
||||
- name: Reload systemd
|
||||
ansible.builtin.systemd:
|
||||
daemon_reload: true
|
||||
|
||||
- name: Start arr services
|
||||
ansible.builtin.systemd:
|
||||
name: "{{ item }}"
|
||||
state: started
|
||||
enabled: true
|
||||
loop:
|
||||
- sonarr.service
|
||||
- radarr.service
|
||||
- lidarr.service
|
||||
- prowlarr.service
|
||||
- bazarr.service
|
6
ansible/roles/arr/vars/main.yaml
Normal file
6
ansible/roles/arr/vars/main.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
arr_packages:
|
||||
- sonarr
|
||||
- radarr
|
||||
- lidarr
|
||||
- bazarr
|
||||
- prowlarr
|
50
ansible/roles/aur_repo_client/tasks/main.yaml
Normal file
50
ansible/roles/aur_repo_client/tasks/main.yaml
Normal file
@ -0,0 +1,50 @@
|
||||
---
|
||||
|
||||
- name: Check if repo public key is in pacman keyring
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- pacman-key
|
||||
- --list-keys
|
||||
- "{{ aur_repo_client_public_key_fingerprint }}"
|
||||
register: repo_key_check
|
||||
failed_when: repo_key_check.rc not in [0, 1]
|
||||
changed_when: false
|
||||
|
||||
- name: Add repo public key to pacman keyring
|
||||
when: repo_key_check.rc == 1
|
||||
block:
|
||||
|
||||
- name: Import the repo public key
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- pacman-key
|
||||
- --recv-keys
|
||||
- "{{ aur_repo_client_public_key_fingerprint }}"
|
||||
- --keyserver
|
||||
- "{{ aur_repo_client_keyserver }}"
|
||||
changed_when: true
|
||||
|
||||
- name: Trust the repo public key
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- pacman-key
|
||||
- --lsign-key
|
||||
- "{{ aur_repo_client_public_key_fingerprint }}"
|
||||
changed_when: true
|
||||
|
||||
- name: Add home repo block to pacman.conf
|
||||
ansible.builtin.blockinfile:
|
||||
path: /etc/pacman.conf
|
||||
block: |
|
||||
[{{ aur_repo_client_repo_name }}]
|
||||
SigLevel = Required TrustedOnly
|
||||
Server = {{ aur_repo_client_repo_address }}
|
||||
create: false
|
||||
state: present
|
||||
insertafter: EOF
|
||||
register: add_pacman_repo
|
||||
|
||||
- name: Update pacman database # noqa: no-handler
|
||||
when: add_pacman_repo.changed
|
||||
community.general.pacman:
|
||||
update_cache: true
|
6
ansible/roles/aur_repo_client/vars/main.yaml
Normal file
6
ansible/roles/aur_repo_client/vars/main.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
|
||||
aur_repo_client_repo_name: "home"
|
||||
aur_repo_client_repo_address: "https://repo.balsillie.house"
|
||||
aur_repo_client_public_key_fingerprint: DB529158B99DD8311D78CA2FBE6003C744F56EE2
|
||||
aur_repo_client_keyserver: hkps://keyserver.ubuntu.com
|
12
ansible/roles/aur_repo_host/files/aur-sync.service
Normal file
12
ansible/roles/aur_repo_host/files/aur-sync.service
Normal file
@ -0,0 +1,12 @@
|
||||
[Unit]
|
||||
Description=Sync AUR packages
|
||||
Wants=aur-sync.timer
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
ExecStart=/usr/bin/aur sync --no-view --upgrades --no-confirm --clean --rm-deps --sign --database home
|
||||
User=aur-builder
|
||||
Group=aur-builder
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
12
ansible/roles/aur_repo_host/files/aur-sync.timer
Normal file
12
ansible/roles/aur_repo_host/files/aur-sync.timer
Normal file
@ -0,0 +1,12 @@
|
||||
[Unit]
|
||||
Description=Timer that runs aur sync service
|
||||
Requires=aur-sync.service
|
||||
|
||||
[Timer]
|
||||
Unit=aur-sync.service
|
||||
OnCalendar=*-*-* 16:00:00
|
||||
RandomizedDelaySec=120
|
||||
Persistent=true
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
270
ansible/roles/aur_repo_host/tasks/main.yaml
Normal file
270
ansible/roles/aur_repo_host/tasks/main.yaml
Normal file
@ -0,0 +1,270 @@
|
||||
---
|
||||
|
||||
- name: Lookup aur_repo_host secret
|
||||
delegate_to: localhost
|
||||
become: false
|
||||
community.hashi_vault.vault_kv1_get:
|
||||
path: ansible/group_vars/aur_repo_hosts
|
||||
register: aur_repo_host_secret
|
||||
|
||||
- name: Set aur_repo facts
|
||||
ansible.builtin.set_fact:
|
||||
aur_repo_private_key: "{{ aur_repo_host_secret.secret.aur_repo_private_key }}"
|
||||
aur_repo_key_thumbprint: "{{ aur_repo_host_secret.secret.aur_repo_key_thumbprint }}"
|
||||
|
||||
- name: Create the makepkg drop-in config file
|
||||
ansible.builtin.template:
|
||||
dest: /etc/makepkg.conf.d/makepkg.conf
|
||||
src: makepkg.conf.j2
|
||||
owner: root
|
||||
group: root
|
||||
mode: "0644"
|
||||
|
||||
- name: Create the build user group
|
||||
ansible.builtin.group:
|
||||
name: "{{ aur_repo_build_account }}"
|
||||
system: true
|
||||
state: present
|
||||
|
||||
- name: Create the build user
|
||||
ansible.builtin.user:
|
||||
name: "{{ aur_repo_build_account }}"
|
||||
password: '!'
|
||||
group: "{{ aur_repo_build_account }}"
|
||||
comment: "AUR Package Builder"
|
||||
shell: /sbin/nologin
|
||||
home: "{{ aur_repo_dir }}"
|
||||
createhome: true
|
||||
system: true
|
||||
state: present
|
||||
|
||||
- name: Create the build user sudoer file
|
||||
ansible.builtin.template:
|
||||
dest: /etc/sudoers.d/{{ aur_repo_build_account }}
|
||||
src: aur-sudoer.j2
|
||||
owner: root
|
||||
group: root
|
||||
mode: "0640"
|
||||
|
||||
- name: Create the build dirs
|
||||
ansible.builtin.file:
|
||||
path: "{{ item }}"
|
||||
state: directory
|
||||
owner: "{{ aur_repo_build_account }}"
|
||||
group: "{{ aur_repo_build_account }}"
|
||||
mode: "0775"
|
||||
loop:
|
||||
- "{{ aur_repo_dir }}"
|
||||
- "{{ aur_repo_dir }}/packages"
|
||||
- "{{ aur_repo_dir }}/sources"
|
||||
- "{{ aur_repo_dir }}/srcpackages"
|
||||
- /var/log/makepkg
|
||||
- /tmp/build
|
||||
|
||||
- name: Check if the signing key is in build user's keyring
|
||||
ansible.builtin.command:
|
||||
cmd: gpg2 --list-secret-key --with-colons {{ aur_repo_key_thumbprint }}
|
||||
failed_when: key_result.rc not in [0, 2]
|
||||
changed_when: false
|
||||
register: key_result
|
||||
vars:
|
||||
ansible_become_user: "{{ aur_repo_build_account }}"
|
||||
|
||||
- name: GPG key import block
|
||||
when: key_result.rc == 2
|
||||
block:
|
||||
|
||||
- name: Template out the signing private key
|
||||
ansible.builtin.template:
|
||||
dest: "/tmp/build/signing_key.asc"
|
||||
src: signing_key.asc.j2
|
||||
owner: "{{ aur_repo_build_account }}"
|
||||
group: "{{ aur_repo_build_account }}"
|
||||
mode: "0600"
|
||||
|
||||
- name: Import the signing key
|
||||
ansible.builtin.command:
|
||||
cmd: gpg2 --import /tmp/build/signing_key.asc
|
||||
changed_when: true
|
||||
vars:
|
||||
ansible_become_user: "{{ aur_repo_build_account }}"
|
||||
|
||||
- name: Delete the signing key
|
||||
ansible.builtin.file:
|
||||
path: "/tmp/build/signing_key.asc"
|
||||
state: absent
|
||||
|
||||
- name: Check if aurutils is already installed
|
||||
ansible.builtin.stat:
|
||||
follow: true
|
||||
path: /usr/bin/aur
|
||||
register: aurutils_stat
|
||||
|
||||
- name: Aurutils install block
|
||||
when: not aurutils_stat.stat.exists
|
||||
block:
|
||||
|
||||
- name: Install makepkg dependencies
|
||||
community.general.pacman:
|
||||
name:
|
||||
- git
|
||||
- base-devel
|
||||
state: present
|
||||
update_cache: true
|
||||
|
||||
- name: Clone aurutils
|
||||
ansible.builtin.git:
|
||||
depth: 1
|
||||
dest: /tmp/aurutils
|
||||
repo: https://aur.archlinux.org/aurutils.git
|
||||
single_branch: true
|
||||
version: master
|
||||
|
||||
- name: Slurp PKGBUILD contents
|
||||
ansible.builtin.slurp:
|
||||
path: /tmp/aurutils/PKGBUILD
|
||||
register: aurutils_pkgbuild
|
||||
|
||||
- name: Parse PKGBUILD into facts
|
||||
ansible.builtin.set_fact:
|
||||
aurutils_dependencies: "{{ aurutils_pkgbuild['content'] | b64decode | regex_search('(?<=^depends=\\().*(?=\\)$)', multiline=True) | replace(\"'\", '') | split(' ') }}" # noqa: yaml[line-length]
|
||||
aurutils_pkgver: "{{ aurutils_pkgbuild['content'] | b64decode | regex_search('(?<=^pkgver=).*(?=$)', multiline=True) }}"
|
||||
aurutils_pkgrel: "{{ aurutils_pkgbuild['content'] | b64decode | regex_search('(?<=^pkgrel=).*(?=$)', multiline=True) }}"
|
||||
aurutils_arch: "{{ aurutils_pkgbuild['content'] | b64decode | regex_search('(?<=^arch=\\().*(?=\\)$)', multiline=True) | replace(\"'\", '') }}"
|
||||
|
||||
- name: Install aurutils dependencies
|
||||
community.general.pacman:
|
||||
name: "{{ aurutils_dependencies }}"
|
||||
state: present
|
||||
reason: dependency
|
||||
update_cache: false
|
||||
|
||||
- name: Build aurutils
|
||||
ansible.builtin.command:
|
||||
cmd: makepkg
|
||||
chdir: /tmp/aurutils
|
||||
creates: "{{ aur_repo_dir }}/packages/aurutils-{{ aurutils_pkgver }}-{{ aurutils_pkgrel }}-{{ aurutils_arch }}.pkg.tar"
|
||||
vars:
|
||||
ansible_become_user: "{{ aur_repo_build_account }}"
|
||||
|
||||
- name: Update repo database
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- repo-add
|
||||
- --prevent-downgrade
|
||||
- --remove
|
||||
- --sign
|
||||
- --key
|
||||
- "{{ aur_repo_key_thumbprint }}"
|
||||
- home.db.tar
|
||||
- aurutils-{{ aurutils_pkgver }}-{{ aurutils_pkgrel }}-{{ aurutils_arch }}.pkg.tar
|
||||
chdir: "{{ aur_repo_dir }}/packages"
|
||||
changed_when: true
|
||||
vars:
|
||||
ansible_become_user: "{{ aur_repo_build_account }}"
|
||||
|
||||
- name: Check if the signing key is in pacman keyring
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- pacman-key
|
||||
- -l
|
||||
- "{{ aur_repo_key_thumbprint }}"
|
||||
failed_when: pacman_key_result.rc not in [0, 1]
|
||||
changed_when: false
|
||||
register: pacman_key_result
|
||||
|
||||
- name: Pacman key import block
|
||||
when: pacman_key_result.rc == 1
|
||||
block:
|
||||
|
||||
- name: Import the signing public key to arch keyring
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- pacman-key
|
||||
- -r
|
||||
- "{{ aur_repo_key_thumbprint }}"
|
||||
- --keyserver
|
||||
- hkps://keyserver.ubuntu.com
|
||||
changed_when: true
|
||||
|
||||
- name: Locally sign the imported pacman key
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- pacman-key
|
||||
- --lsign-key
|
||||
- "{{ aur_repo_key_thumbprint }}"
|
||||
changed_when: true
|
||||
|
||||
- name: Add custom repo block to pacman.conf
|
||||
ansible.builtin.blockinfile:
|
||||
path: /etc/pacman.conf
|
||||
block: |
|
||||
[home]
|
||||
SigLevel = Required TrustedOnly
|
||||
Server = file://{{ aur_repo_dir }}/packages
|
||||
create: false
|
||||
state: present
|
||||
insertafter: EOF
|
||||
|
||||
- name: Install aurutils
|
||||
community.general.pacman:
|
||||
name: aurutils
|
||||
state: present
|
||||
update_cache: true
|
||||
|
||||
# - name: Enable the multilib repository
|
||||
# ansible.builtin.replace:
|
||||
# path: /etc/pacman.conf
|
||||
# backup: true
|
||||
# regexp: '^[#]?\[multilib\]\n[#]?Include = \/etc\/pacman.d\/mirrorlist$'
|
||||
# replace: '[multilib]\nInclude = /etc/pacman.d/mirrorlist'
|
||||
# register: multilib_enable
|
||||
|
||||
# - name: Update the package database if multilib was enabled # noqa: no-handler
|
||||
# when: multilib_enable.changed | default(false)
|
||||
# community.general.pacman:
|
||||
# update_cache: true
|
||||
|
||||
- name: Sync AUR packages
|
||||
ansible.builtin.command:
|
||||
cmd: aur sync --no-view -CnrS {{ item }}
|
||||
loop: "{{ aur_repo_host_packages }}"
|
||||
register: aur_sync_result
|
||||
changed_when: (aur_sync_result.stderr_lines | last | replace(':','')) != "sync there is nothing to do"
|
||||
failed_when: aur_sync_result.rc != 0
|
||||
vars:
|
||||
ansible_become_user: "{{ aur_repo_build_account }}"
|
||||
|
||||
- name: Add the root www folder if it doesn't exist
|
||||
ansible.builtin.file:
|
||||
path: /var/www
|
||||
state: directory
|
||||
owner: http
|
||||
group: http
|
||||
mode: "0775"
|
||||
|
||||
- name: Link the aur repo to the web root
|
||||
ansible.builtin.file:
|
||||
src: "{{ aur_repo_dir }}/packages"
|
||||
path: /var/www{{ aur_repo_dir }}
|
||||
state: link
|
||||
|
||||
- name: Add the aur-sync systemd unit files
|
||||
ansible.builtin.copy:
|
||||
src: "{{ item }}"
|
||||
dest: /usr/lib/systemd/system/
|
||||
owner: root
|
||||
group: root
|
||||
mode: "0644"
|
||||
loop:
|
||||
- aur-sync.service
|
||||
- aur-sync.timer
|
||||
register: aur_sync_unit_files
|
||||
|
||||
- name: Enable and start the aur-sync systemd timer # noqa: no-handler
|
||||
when: aur_sync_unit_files.changed
|
||||
ansible.builtin.systemd:
|
||||
name: aur-sync.timer
|
||||
enabled: true
|
||||
state: started
|
||||
daemon_reload: true
|
1
ansible/roles/aur_repo_host/templates/aur-sudoer.j2
Normal file
1
ansible/roles/aur_repo_host/templates/aur-sudoer.j2
Normal file
@ -0,0 +1 @@
|
||||
{{ aur_repo_build_account }} ALL = (root) NOPASSWD: /usr/bin/pacman, /usr/bin/pacsync
|
21
ansible/roles/aur_repo_host/templates/makepkg.conf.j2
Normal file
21
ansible/roles/aur_repo_host/templates/makepkg.conf.j2
Normal file
@ -0,0 +1,21 @@
|
||||
|
||||
# Global Options
|
||||
|
||||
OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge debug lto autodeps)
|
||||
MAKEFLAGS="-j{{ (ansible_processor_nproc - 1) }}"
|
||||
PACKAGER="{{ aur_repo_packager_name }} <{{ aur_repo_packager_email }}>"
|
||||
|
||||
# Build Environment
|
||||
|
||||
BUILDDIR=/tmp/build
|
||||
BUILDENV=(!distcc color !ccache check sign)
|
||||
GPGKEY={{ aur_repo_key_thumbprint }}
|
||||
|
||||
# Outputs
|
||||
|
||||
PKGDEST={{ aur_repo_dir }}/packages
|
||||
SRCDEST={{ aur_repo_dir }}/sources
|
||||
SRCPKGDEST={{ aur_repo_dir }}/srcpackages
|
||||
LOGDEST=/var/log/makepkg
|
||||
PKGEXT=".pkg.tar"
|
||||
SRCEXT=".src.tar"
|
1
ansible/roles/aur_repo_host/templates/signing_key.asc.j2
Normal file
1
ansible/roles/aur_repo_host/templates/signing_key.asc.j2
Normal file
@ -0,0 +1 @@
|
||||
{{ aur_repo_private_key }}
|
6
ansible/roles/certbot/handlers/main.yaml
Normal file
6
ansible/roles/certbot/handlers/main.yaml
Normal file
@ -0,0 +1,6 @@
|
||||
---
|
||||
|
||||
- name: Restart nginx
|
||||
ansible.builtin.service:
|
||||
name: nginx.service
|
||||
state: restarted
|
67
ansible/roles/certbot/tasks/main.yaml
Normal file
67
ansible/roles/certbot/tasks/main.yaml
Normal file
@ -0,0 +1,67 @@
|
||||
- name: Install certbot package (Archlinux)
|
||||
when: ansible_facts['os_family'] == "Archlinux"
|
||||
community.general.pacman:
|
||||
name:
|
||||
- certbot
|
||||
- certbot-dns-{{ certbot_dns_plugin }}
|
||||
state: present
|
||||
update_cache: true
|
||||
|
||||
- name: Install certbot webserver plugin (Archlinux)
|
||||
when:
|
||||
- ansible_facts['os_family'] == "Archlinux"
|
||||
- certbot_webserver_type == 'nginx'
|
||||
community.general.pacman:
|
||||
name:
|
||||
- certbot-nginx
|
||||
state: present
|
||||
update_cache: true
|
||||
|
||||
- name: Template out the rfc2136 credentials file
|
||||
when: certbot_dns_plugin == 'rfc2136'
|
||||
ansible.builtin.template:
|
||||
src: "{{ certbot_dns_plugin }}.conf.j2"
|
||||
dest: "/etc/letsencrypt/{{ certbot_dns_plugin }}.conf"
|
||||
owner: root
|
||||
group: root
|
||||
mode: '0600'
|
||||
|
||||
- name: Template out cloudflare credentials file
|
||||
when: certbot_dns_plugin == 'cloudflare'
|
||||
ansible.builtin.template:
|
||||
src: "{{ certbot_dns_plugin }}.conf.j2"
|
||||
dest: "/etc/letsencrypt/{{ certbot_dns_plugin }}.conf"
|
||||
owner: root
|
||||
group: root
|
||||
mode: '0600'
|
||||
|
||||
- name: Template out the certbot default config
|
||||
ansible.builtin.template:
|
||||
src: cli.ini.j2
|
||||
dest: /etc/letsencrypt/cli.ini
|
||||
owner: root
|
||||
group: root
|
||||
mode: '0644'
|
||||
|
||||
- name: Request and install certificates
|
||||
ansible.builtin.command:
|
||||
argv:
|
||||
- certbot
|
||||
- certonly
|
||||
- -n
|
||||
- --dns-{{ certbot_dns_plugin }}
|
||||
- --dns-{{ certbot_dns_plugin }}-credentials
|
||||
- /etc/letsencrypt/{{ certbot_dns_plugin }}.conf
|
||||
- --dns-{{ certbot_dns_plugin }}-propagation-seconds
|
||||
- "{{ certbot_dns_propagation_seconds | default(10) }}"
|
||||
- -d
|
||||
- "{{ item }}"
|
||||
creates: /etc/letsencrypt/live/{{ item }}/fullchain.pem
|
||||
loop: "{{ certbot_domains }}"
|
||||
notify: "{{ certbot_notify | default(omit) }}"
|
||||
|
||||
- name: Enable certbot renewal
|
||||
ansible.builtin.service:
|
||||
name: certbot-renew.timer
|
||||
state: started
|
||||
enabled: true
|
3
ansible/roles/certbot/templates/cli.ini.j2
Normal file
3
ansible/roles/certbot/templates/cli.ini.j2
Normal file
@ -0,0 +1,3 @@
|
||||
rsa-key-size = 4096
|
||||
email = {{ certbot_email }}
|
||||
agree-tos = true
|
1
ansible/roles/certbot/templates/cloudflare.conf.j2
Normal file
1
ansible/roles/certbot/templates/cloudflare.conf.j2
Normal file
@ -0,0 +1 @@
|
||||
dns_cloudflare_api_token = {{ certbot_cloudflare_api_token }}
|
6
ansible/roles/certbot/templates/rfc2136.conf.j2
Normal file
6
ansible/roles/certbot/templates/rfc2136.conf.j2
Normal file
@ -0,0 +1,6 @@
|
||||
dns_rfc2136_server = {{ certbot_rfc2136_server }}
|
||||
dns_rfc2136_port = {{ certbot_rfc2136_port | default(53) }}
|
||||
dns_rfc2136_name = {{ certbot_rfc2136_key_name }}
|
||||
dns_rfc2136_secret = {{ certbot_rfc2136_key_secret }}
|
||||
dns_rfc2136_algorithm = {{ certbot_rfc2136_key_algorithm | upper }}
|
||||
dns_rfc2136_sign_query = true
|
82
ansible/roles/docker/tasks/main.yaml
Normal file
82
ansible/roles/docker/tasks/main.yaml
Normal file
@ -0,0 +1,82 @@
|
||||
---
|
||||
|
||||
- name: Install Docker on Archlinux
|
||||
when: ansible_facts['os_family'] == "Archlinux"
|
||||
community.general.pacman:
|
||||
name: docker
|
||||
state: present
|
||||
update_cache: true
|
||||
|
||||
- name: Add users to docker group
|
||||
ansible.builtin.user:
|
||||
name: "{{ item }}"
|
||||
groups: docker
|
||||
append: true
|
||||
loop: "{{ docker_users }}"
|
||||
|
||||
- name: Start and enable Docker
|
||||
ansible.builtin.systemd:
|
||||
name: docker
|
||||
state: started
|
||||
enabled: true
|
||||
|
||||
- name: Create Docker networks
|
||||
when:
|
||||
- docker_networks is defined
|
||||
- docker_networks | length > 0
|
||||
community.docker.docker_network:
|
||||
attachable: "{{ item.attachable | default(true) }}"
|
||||
driver: "{{ item.driver | default('bridge') }}"
|
||||
driver_options: "{{ item.driver_options | default(omit) }}"
|
||||
enable_ipv6: "{{ item.enable_ipv6 | default(false) }}"
|
||||
internal: "{{ item.internal | default(false) }}"
|
||||
ipam_config: "{{ item.ipam | default(omit) }}"
|
||||
name: "{{ item.name }}"
|
||||
state: "present"
|
||||
loop: "{{ docker_networks }}"
|
||||
|
||||
- name: Create Docker volumes
|
||||
when:
|
||||
- docker_volumes is defined
|
||||
- docker_volumes | length > 0
|
||||
community.general.docker_volume:
|
||||
driver: "{{ item.driver | default('local') }}"
|
||||
driver_options: "{{ item.driver_options | default({}) }}"
|
||||
recreate: "never"
|
||||
state: "present"
|
||||
volume_name: "{{ item.name }}"
|
||||
loop: "{{ docker_volumes }}"
|
||||
|
||||
- name: Pull Docker images
|
||||
when:
|
||||
- docker_images is defined
|
||||
- docker_images | length > 0
|
||||
community.docker.docker_image_pull:
|
||||
name: "{{ item.name }}"
|
||||
pull: "always"
|
||||
tag: "{{ item.tag | default('latest') }}"
|
||||
loop: "{{ docker_images }}"
|
||||
|
||||
- name: Create Docker containers
|
||||
when:
|
||||
- docker_containers is defined
|
||||
- docker_containers | length > 0
|
||||
community.general.docker_container:
|
||||
auto_remove: "{{ item.auto_remove | default(false) }}"
|
||||
capabilities: "{{ item.capabilities | default(omit) }}"
|
||||
command: "{{ item.command | default(omit) }}"
|
||||
detach: true
|
||||
domainname: "{{ item.domainname | default(omit) }}"
|
||||
entrypoint: "{{ item.entrypoint | default(omit) }}"
|
||||
env: "{{ item.env | default({}) }}"
|
||||
etc_hosts: "{{ item.etc_hosts | default({}) }}"
|
||||
hostname: "{{ item.hostname | default(item.name) }}"
|
||||
image: "{{ item.image }}"
|
||||
name: "{{ item.name }}"
|
||||
networks: "{{ item.networks | default(omit) }}"
|
||||
published_ports: "{{ item.ports | default([]) }}"
|
||||
restart_policy: "{{ item.restart_policy | default('unless_stopped') }}"
|
||||
state: 'started'
|
||||
sysctls: "{{ item.sysctls | default({}) }}"
|
||||
volumes: "{{ item.volumes | default([]) }}"
|
||||
loop: "{{ docker_containers }}"
|
4
ansible/roles/firewall/defaults/main.yml
Normal file
4
ansible/roles/firewall/defaults/main.yml
Normal file
@ -0,0 +1,4 @@
|
||||
---
|
||||
firewall_package: ufw
|
||||
firewall_ssh_interface: br22
|
||||
firewall_spice_interface: br22
|
58
ansible/roles/firewall/tasks/main.yml
Normal file
58
ansible/roles/firewall/tasks/main.yml
Normal file
@ -0,0 +1,58 @@
|
||||
---
|
||||
- name: install ufw arch
|
||||
become: true
|
||||
community.general.pacman:
|
||||
name: "{{ firewall_package }}"
|
||||
state: latest
|
||||
update_cache: true
|
||||
when:
|
||||
- ansible_os_family == 'Arch'
|
||||
|
||||
- name: start ufw in allow mode
|
||||
become: true
|
||||
community.general.ufw:
|
||||
policy: allow
|
||||
state: enabled
|
||||
|
||||
- name: start and enable ufw service
|
||||
become: true
|
||||
ansible.builtin.service:
|
||||
name: ufw.service
|
||||
state: started
|
||||
enabled: yes
|
||||
|
||||
- name: add ssh rules
|
||||
become: true
|
||||
community.general.ufw:
|
||||
comment: SSH access
|
||||
rule: allow
|
||||
to_port: '22'
|
||||
proto: tcp
|
||||
interface: "{{ firewall_ssh_interface }}"
|
||||
direction: in
|
||||
src: "{{ item }}"
|
||||
loop:
|
||||
- 192.168.20.0/24
|
||||
- 192.168.72.0/24
|
||||
- 2406:e001:a:cb20::/64
|
||||
|
||||
- name: add spice rules
|
||||
become: true
|
||||
community.general.ufw:
|
||||
comment: SPICE access to guests
|
||||
rule: allow
|
||||
to_port: 5901:5904
|
||||
proto: tcp
|
||||
interface: "{{ firewall_spice_interface }}"
|
||||
direction: in
|
||||
src: '{{ item }}'
|
||||
loop:
|
||||
- 192.168.20.0/24
|
||||
- 192.168.72.0/24
|
||||
- 2406:e001:a:cb20::/64
|
||||
|
||||
- name: restore default deny policy
|
||||
become: true
|
||||
community.general.ufw:
|
||||
policy: deny
|
||||
logging: low
|
20
ansible/roles/hypervisor/defaults/main.yaml
Normal file
20
ansible/roles/hypervisor/defaults/main.yaml
Normal file
@ -0,0 +1,20 @@
|
||||
libvirt_packages:
|
||||
Archlinux:
|
||||
- qemu-base
|
||||
- openbsd-netcat
|
||||
- swtpm
|
||||
- gettext
|
||||
- libvirt
|
||||
- libvirt-python
|
||||
- python-lxml
|
||||
|
||||
hypervisor:
|
||||
storage: dir
|
||||
device: /dev/sdb
|
||||
|
||||
# hypervisor:
|
||||
# storage: zfs
|
||||
# datasets:
|
||||
# - name: tank/vhds
|
||||
# compression: lz4
|
||||
# encryption: 'off'
|
35
ansible/roles/hypervisor/tasks/libvirt_dir.yaml
Normal file
35
ansible/roles/hypervisor/tasks/libvirt_dir.yaml
Normal file
@ -0,0 +1,35 @@
|
||||
---
|
||||
|
||||
- name: Create the libvirt storage directories
|
||||
ansible.builtin.file:
|
||||
path: "{{ item }}"
|
||||
state: directory
|
||||
owner: libvirt-qemu
|
||||
group: libvirt-qemu
|
||||
mode: '0775'
|
||||
loop:
|
||||
- /var/lib/libvirt/vhds/
|
||||
|
||||
- name: Define additional libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item.name }}"
|
||||
command: define
|
||||
xml: "{{ lookup('template', 'dir_libvirt_pool.xml.j2') }}"
|
||||
loop:
|
||||
- name: vhds
|
||||
path: /var/lib/libvirt/vhds/
|
||||
|
||||
- name: Create additional libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item }}"
|
||||
command: build
|
||||
loop:
|
||||
- vhds
|
||||
|
||||
- name: Start additional libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item }}"
|
||||
state: active
|
||||
autostart: true
|
||||
loop:
|
||||
- vhds
|
91
ansible/roles/hypervisor/tasks/libvirt_drive_mount.yaml
Normal file
91
ansible/roles/hypervisor/tasks/libvirt_drive_mount.yaml
Normal file
@ -0,0 +1,91 @@
|
||||
---
|
||||
|
||||
- name: Configure disk partition
|
||||
community.general.parted:
|
||||
align: optimal
|
||||
device: "{{ hypervisor.device }}"
|
||||
fs_type: ext4
|
||||
label: gpt
|
||||
name: libvirt
|
||||
number: 1
|
||||
part_end: 100%
|
||||
part_start: 0%
|
||||
state: present
|
||||
|
||||
# TODO disk encryption
|
||||
|
||||
- name: Format filesystem
|
||||
community.general.filesystem:
|
||||
device: "{{ hypervisor.device }}1"
|
||||
fstype: ext4
|
||||
resizefs: true
|
||||
state: present
|
||||
|
||||
- name: Get list of services
|
||||
ansible.builtin.service_facts:
|
||||
|
||||
- name: Stop the libvirt services
|
||||
when: item in ansible_facts.services
|
||||
ansible.builtin.service:
|
||||
name: "{{ item }}"
|
||||
state: stopped
|
||||
loop:
|
||||
- libvirtd.service
|
||||
|
||||
- name: Check if libvirt storage directory exists
|
||||
ansible.builtin.stat:
|
||||
path: /var/lib/libvirt/
|
||||
register: libvirt_storage
|
||||
|
||||
- name: Temp mount and copy block
|
||||
when: libvirt_storage.stat.exists
|
||||
block:
|
||||
|
||||
- name: Temporarily mount hypervisor storage
|
||||
ansible.posix.mount:
|
||||
path: /mnt/libvirt_temp/
|
||||
src: "{{ hypervisor.device }}1"
|
||||
fstype: ext4
|
||||
state: mounted
|
||||
boot: false
|
||||
|
||||
- name: Copy libvirt contents to hypervisor storage
|
||||
ansible.builtin.copy:
|
||||
src: /var/lib/libvirt/
|
||||
dest: /mnt/libvirt_temp/
|
||||
remote_src: true
|
||||
mode: preserve
|
||||
|
||||
- name: Remove existing libvirt storage
|
||||
ansible.builtin.file:
|
||||
path: /var/lib/libvirt/
|
||||
state: "{{ item }}"
|
||||
owner: root
|
||||
group: root
|
||||
mode: '0775'
|
||||
loop:
|
||||
- absent
|
||||
- directory
|
||||
|
||||
always:
|
||||
|
||||
- name: Unmount from temporary mount point
|
||||
ansible.posix.mount:
|
||||
path: /mnt/libvirt_temp/
|
||||
state: absent
|
||||
|
||||
- name: Mount hypervisor storage
|
||||
ansible.posix.mount:
|
||||
path: /var/lib/libvirt/
|
||||
src: "{{ hypervisor.device }}1"
|
||||
fstype: ext4
|
||||
state: mounted
|
||||
boot: true
|
||||
|
||||
- name: Start the libvirt service
|
||||
when: item in ansible_facts.services
|
||||
ansible.builtin.service:
|
||||
name: "{{ item }}"
|
||||
state: started
|
||||
loop:
|
||||
- libvirtd.service
|
40
ansible/roles/hypervisor/tasks/libvirt_zfs.yaml
Normal file
40
ansible/roles/hypervisor/tasks/libvirt_zfs.yaml
Normal file
@ -0,0 +1,40 @@
|
||||
---
|
||||
|
||||
- name: Create libvirt zfs dataset(s)
|
||||
community.general.zfs:
|
||||
name: "{{ item.name }}"
|
||||
state: present
|
||||
extra_zfs_properties: # TODO fix property values
|
||||
canmount: false
|
||||
mountpoint: none
|
||||
compression: false
|
||||
primarycache: metadata
|
||||
secondarycache: none
|
||||
reservation: none
|
||||
refreservation: none
|
||||
dedup: false
|
||||
encryption: "{{ item.encryption | default('off') }}"
|
||||
volmode: dev
|
||||
devices: false
|
||||
atime: false
|
||||
loop: "{{ hypervisor.datasets }}"
|
||||
|
||||
- name: Define additional libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item.name | split('/') | last }}"
|
||||
command: define
|
||||
xml: "{{ lookup('template', 'zfs_libvirt_pool.xml.j2') }}"
|
||||
loop: "{{ hypervisor.datasets }}"
|
||||
|
||||
- name: Create additional libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item.name | split('/') | last }}"
|
||||
command: build
|
||||
loop: "{{ hypervisor.datasets }}"
|
||||
|
||||
- name: Start additional libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item.name | split('/') | last }}"
|
||||
state: active
|
||||
autostart: true
|
||||
loop: "{{ hypervisor.datasets }}"
|
136
ansible/roles/hypervisor/tasks/main.yaml
Normal file
136
ansible/roles/hypervisor/tasks/main.yaml
Normal file
@ -0,0 +1,136 @@
|
||||
---
|
||||
|
||||
- name: Format and mount the libvirt disk if it is not root
|
||||
when:
|
||||
- hypervisor.device is defined
|
||||
- hypervisor.device not in (ansible_mounts | json_query('[?mount == `/var/lib/libvirt`].device'))
|
||||
ansible.builtin.include_tasks:
|
||||
file: libvirt_drive_mount.yaml
|
||||
|
||||
- name: Install libvirt packages (Archlinux)
|
||||
when: ansible_distribution == 'Archlinux'
|
||||
community.general.pacman:
|
||||
name: "{{ libvirt_packages['Archlinux'] }}"
|
||||
state: present
|
||||
update_cache: true
|
||||
|
||||
- name: Add user to libvirt group
|
||||
ansible.builtin.user:
|
||||
name: "{{ ansible_user }}"
|
||||
groups:
|
||||
- libvirt
|
||||
- libvirt-qemu
|
||||
append: true
|
||||
|
||||
- name: Load br_netfilter kernel module so sysctl flags can be set
|
||||
community.general.modprobe:
|
||||
name: br_netfilter
|
||||
state: present
|
||||
|
||||
- name: Set required sysctl flags for bridging
|
||||
ansible.posix.sysctl:
|
||||
name: "{{ item.name }}"
|
||||
reload: true
|
||||
state: present
|
||||
sysctl_file: /etc/sysctl.d/bridge.conf
|
||||
sysctl_set: true
|
||||
value: "{{ item.value }}"
|
||||
loop:
|
||||
- name: net.ipv4.ip_forward
|
||||
value: 1
|
||||
- name: net.bridge.bridge-nf-call-iptables
|
||||
value: 0
|
||||
- name: net.bridge.bridge-nf-call-ip6tables
|
||||
value: 0
|
||||
- name: net.bridge.bridge-nf-call-arptables
|
||||
value: 0
|
||||
|
||||
- name: Add bridge(s) to qemu_bridge_helper
|
||||
when: qemu_bridges is defined
|
||||
ansible.builtin.lineinfile:
|
||||
path: /etc/qemu/bridge.conf
|
||||
line: "{{ item }}"
|
||||
state: present
|
||||
backup: false
|
||||
insertafter: EOF
|
||||
loop: "{{ qemu_bridges | default(['virbr0']) }}"
|
||||
|
||||
- name: Start and enable libvirt service
|
||||
ansible.builtin.service:
|
||||
name: libvirtd.service
|
||||
state: started
|
||||
enabled: true
|
||||
|
||||
- name: Stop the default libvirt network
|
||||
community.libvirt.virt_net:
|
||||
name: default
|
||||
state: inactive
|
||||
|
||||
- name: Remove default libvirt network
|
||||
community.libvirt.virt_net:
|
||||
name: default
|
||||
state: absent
|
||||
|
||||
- name: Remove the default libvirt storage pool
|
||||
community.libvirt.virt_pool:
|
||||
name: default
|
||||
state: deleted
|
||||
|
||||
- name: Create standard libvirt storage directories
|
||||
ansible.builtin.file:
|
||||
path: "{{ item }}"
|
||||
state: directory
|
||||
owner: libvirt-qemu
|
||||
group: libvirt-qemu
|
||||
mode: '0775'
|
||||
loop:
|
||||
- /var/lib/libvirt/isos/
|
||||
- /var/lib/libvirt/nvram/
|
||||
|
||||
- name: Get libvirt storage pool facts
|
||||
community.libvirt.virt_pool:
|
||||
command: facts
|
||||
|
||||
- name: Define the standard libvirt storage pools # TODO add when condition against existing pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item.name }}"
|
||||
command: define
|
||||
xml: "{{ lookup('template', 'dir_libvirt_pool.xml.j2') }}"
|
||||
loop:
|
||||
- name: isos
|
||||
path: /var/lib/libvirt/isos/
|
||||
- name: nvram
|
||||
path: /var/lib/libvirt/nvram/
|
||||
|
||||
- name: Create the standard libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item }}"
|
||||
command: build
|
||||
loop:
|
||||
- isos
|
||||
- nvram
|
||||
|
||||
- name: Start the standard libvirt storage pools
|
||||
community.libvirt.virt_pool:
|
||||
name: "{{ item }}"
|
||||
state: active
|
||||
autostart: true
|
||||
loop:
|
||||
- isos
|
||||
- nvram
|
||||
|
||||
- name: Setup additional libvirt storage (dir)
|
||||
when: hypervisor.storage == 'dir'
|
||||
ansible.builtin.include_tasks:
|
||||
file: libvirt_dir.yaml
|
||||
|
||||
- name: Setup additional libvirt storage (zfs)
|
||||
when: hypervisor.storage == 'zfs'
|
||||
ansible.builtin.include_tasks:
|
||||
file: libvirt_zfs.yaml
|
||||
|
||||
# - name: Enroll libvirtd TLS certificate
|
||||
|
||||
# - name: Configure libvirtd TLS listener
|
||||
|
||||
# - name: Open libvirtd TLS firewall ports
|
@ -0,0 +1,6 @@
|
||||
<pool type="dir">
|
||||
<name>{{ item.name }}</name>
|
||||
<target>
|
||||
<path>{{ item.path }}</path>
|
||||
</target>
|
||||
</pool>
|
@ -0,0 +1,6 @@
|
||||
<pool type="zfs">
|
||||
<name>{{ item.name | split('/') | last }}</name>
|
||||
<source>
|
||||
<name>{{ item.name }}</name>
|
||||
</source>
|
||||
</pool>
|
21
ansible/roles/hypervisor_old/defaults/main.yml
Normal file
21
ansible/roles/hypervisor_old/defaults/main.yml
Normal file
@ -0,0 +1,21 @@
|
||||
---
|
||||
libvirt_server_packages:
|
||||
- qemu-base
|
||||
- libvirt
|
||||
- bridge-utils
|
||||
- openbsd-netcat
|
||||
- edk2-ovmf
|
||||
- swtpm
|
||||
- libvirt-python
|
||||
- python-lxml
|
||||
|
||||
libvirt_zfs_pool_name: zfs
|
||||
libvirt_zfs_pool_path: ssd/vhds
|
||||
|
||||
libvirt_iso_pool_name: iso
|
||||
libvirt_iso_pool_path: /iso
|
||||
|
||||
libvirt_qcow_pool_name: qcow
|
||||
libvirt_qcow_pool_path: /qcow
|
||||
|
||||
libvirt_cluster_network_name: cluster
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user