post home assistant update
.gitignore (vendored): 1 line changed
@@ -1 +1,2 @@
 secrets/
+venv/
.vscode/settings.json (vendored, new file): 12 lines
@@ -0,0 +1,12 @@
{
    "yaml.schemas": {
        "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible.json#/$defs/playbook": [
            "file:///home/ducoterra/Containers/ansible/update-nginx-stream.yaml",
            "file:///home/ducoterra/Containers/ansible/install-nginx-stream.yaml",
            "file:///home/ducoterra/Containers/nginx-stream/update-nginx-stream.yaml",
            "file:///home/ducoterra/Containers/nginx-stream/certbot.yaml",
            "file:///home/ducoterra/Containers/nginx-stream/nginx.yaml"
        ],
        "https://raw.githubusercontent.com/DavidAnson/markdownlint/main/schema/markdownlint-config-schema.json": "untitled:Untitled-1"
    }
}
@@ -6,6 +6,7 @@ quick.
 - [Fedora Server](#fedora-server)
 - [Initialization](#initialization)
 - [Disable swap](#disable-swap)
+- [Network](#network)
 - [Enable ISCSI](#enable-iscsi)
 - [Disable Firewalld](#disable-firewalld)
 - [Set SELinux to Permissive](#set-selinux-to-permissive)
@@ -20,6 +21,7 @@ quick.
 1. `dnf install vim pwgen wireguard-tools`
 2. `hostnamectl hostname node1`
 3. Set a static IP through the web interface
+4. Allow wheel group members to sudo without password

 ## Disable swap

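A minimal sketch of step 4 above (passwordless sudo for the wheel group). The drop-in file name is an assumption, not something from the repo:

```bash
# Assumption: a drop-in under /etc/sudoers.d is acceptable on Fedora Server
echo '%wheel ALL=(ALL) NOPASSWD: ALL' | sudo tee /etc/sudoers.d/wheel-nopasswd
sudo chmod 0440 /etc/sudoers.d/wheel-nopasswd
# Validate sudoers syntax before logging out of the session
sudo visudo -c
```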
@@ -30,6 +32,14 @@ quick.

 mask <systemd-zram-setup@zram0.service>

+## Network
+
+1. Set MTU to 9000
+
+If your network supports it, use 9000 as your mtu to allow more data per packet between
+servers. Note! For bridge interfaces you must set both the physical interface and bridge
+interface to 9000 - setting one but not the other can cause connectivity problems.
+
 ## Enable ISCSI

 ```bash
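A hedged sketch of the MTU change with NetworkManager. The connection names (`eno1`, `br0`) are placeholders, and the exact property group for a bridge connection may differ on your setup:

```bash
# Assumption: connection names; list yours with `nmcli connection show`
nmcli connection modify eno1 802-3-ethernet.mtu 9000
nmcli connection modify br0 802-3-ethernet.mtu 9000   # set the bridge too, per the note above
nmcli connection up eno1 && nmcli connection up br0
# Verify end to end with a non-fragmenting ping (8972 = 9000 minus 28 bytes of headers)
ping -M do -s 8972 <peer-ip>
```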
@@ -184,7 +194,9 @@ You'll also need a policy which allows the user to modify the selected hosted zo
         "route53:ChangeResourceRecordSets"
       ],
       "Resource" : [
-        "arn:aws:route53:::hostedzone/Z012820733346FJ0U4FUF"
+        "arn:aws:route53:::hostedzone/Z012820733346FJ0U4FUF",
+        "arn:aws:route53:::hostedzone/Z0092652G7L97DSINN18",
+        "arn:aws:route53:::hostedzone/Z04612891U5Q2JRHUZ11T"
       ]
     }
   ]
README.md: 14 lines changed
@@ -17,6 +17,8 @@ A project to store container-based hosting stuff.
 - [Nextcloud](#nextcloud)
 - [Test Deploy](#test-deploy)
 - [Gitea](#gitea)
+- [Staging](#staging)
+- [Install](#install)
 - [Minecraft](#minecraft)
 - [Nimcraft](#nimcraft)
 - [Testing](#testing)
@@ -313,7 +315,6 @@ helm upgrade \
   --values truenas-iscsi-enc0.yaml \
   --namespace democratic-csi \
   --create-namespace \
-  --set-file driver.config.sshConnection.privateKey=secrets/democratic_rsa \
   --set driver.config.httpConnection.apiKey=$(cat secrets/truenas-api-key) \
   zfs-iscsi-enc0 democratic-csi/democratic-csi

@@ -323,7 +324,6 @@ helm upgrade \
   --values truenas-iscsi-enc1.yaml \
   --namespace democratic-csi \
   --create-namespace \
-  --set-file driver.config.sshConnection.privateKey=secrets/democratic_rsa \
   --set driver.config.httpConnection.apiKey=$(cat secrets/truenas-api-key) \
   zfs-iscsi-enc1 democratic-csi/democratic-csi

@@ -333,7 +333,6 @@ helm upgrade \
   --values truenas-nfs-enc1.yaml \
   --namespace democratic-csi \
   --create-namespace \
-  --set-file driver.config.sshConnection.privateKey=secrets/democratic_rsa \
   --set driver.config.httpConnection.apiKey=$(cat secrets/truenas-api-key) \
   zfs-nfs-enc1 democratic-csi/democratic-csi
 ```
@@ -537,6 +536,15 @@ they decide to change things. This is the first chart (besides ingress-nginx) wh
 we need to pay attention to the MetalLB annotation. This has been set in the values.yaml
 file.

+#### Staging
+
+There is a `gitea-staging.yaml` file with staging values. This should be installed in
+the `gitea-staging` namespace. Follow the instructions below, but replace the `gitea`
+namespace with `gitea-staging`. Staging is useful for testing major release upgrades,
+especially since Gitea tends to change how `values.yaml` is structured.
+
+#### Install
+
 First we need to create the gitea admin secret

 ```bash
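A sketch of the staging install described above, assuming the same chart as the production instructions; the `gitea-charts/gitea` chart reference and the `gitea-staging` release name are assumptions, not from this diff:

```bash
# Assumptions: the gitea-charts Helm repo is already added; release name gitea-staging
helm upgrade \
  --install \
  --values gitea-staging.yaml \
  --namespace gitea-staging \
  --create-namespace \
  gitea-staging gitea-charts/gitea
```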
@@ -22,3 +22,7 @@ apt:
 hardware:
   hosts:
     gamebox:
+
+hass:
+  hosts:
+    homeassistant:
@@ -11,7 +11,7 @@ services:
       - "0.0.0.0:8081:80/tcp"
     environment:
       TZ: "America/Chicago"
-      WEBPASSWORD: "SET A PASSWORD HERE"
+      # WEBPASSWORD: "SET A PASSWORD HERE"
     # Volumes store your data between container upgrades
     volumes:
      - pihole:/etc/pihole
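With WEBPASSWORD commented out above, the admin password has to come from somewhere other than the compose file. One hedged option using only standard Compose environment substitution; the variable name and the compose-file reference are assumptions:

```bash
# Assumption: the compose file would reference the variable, e.g. WEBPASSWORD: "${PIHOLE_WEBPASSWORD}"
export PIHOLE_WEBPASSWORD="$(pwgen 24 1)"   # any password generator works
echo "Pi-hole admin password: ${PIHOLE_WEBPASSWORD}"
docker compose up -d
```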
generic-nfs-enc1.yaml (new file): 69 lines
@@ -0,0 +1,69 @@
csiDriver:
  name: "driveripper.zfs-gen-nfs-enc1"

storageClasses:
  - name: zfs-gen-nfs-enc1
    defaultClass: false
    reclaimPolicy: Delete
    volumeBindingMode: Immediate
    allowVolumeExpansion: true
    parameters:
      fsType: nfs
    mountOptions:
      - async
      - noatime
    secrets:
      provisioner-secret:
      controller-publish-secret:
      node-stage-secret:
      node-publish-secret:
      controller-expand-secret:

volumeSnapshotClasses: []

driver:
  config:
    driver: zfs-generic-nfs
    sshConnection:
      host: driveripper.reeseapps.com
      port: 22
      username: democratic
      privateKey: ""

    zfs:
      cli:
        sudoEnabled: true
        paths:
          zfs: /usr/sbin/zfs
          zpool: /usr/sbin/zpool
          sudo: /usr/bin/sudo
          chroot: /usr/sbin/chroot

      datasetProperties:
        "org.freenas:description": "{{ parameters.[csi.storage.k8s.io/pvc/namespace] }}/{{ parameters.[csi.storage.k8s.io/pvc/name] }}"

      datasetParentName: enc1/dcsi/nfs
      # do NOT make datasetParentName and detachedSnapshotsDatasetParentName overlap
      # they may be siblings, but neither should be nested in the other
      # do NOT comment this option out even if you don't plan to use snapshots, just leave it with dummy value
      detachedSnapshotsDatasetParentName: enc1/dcsi/snaps

      datasetEnableQuotas: true
      datasetEnableReservation: false
      datasetPermissionsMode: "0777"
      datasetPermissionsUser: 0
      datasetPermissionsGroup: 0
      # datasetPermissionsAcls:
      # - "-m everyone@:full_set:allow"
      #- "-m u:kube:full_set:allow"

    nfs:
      # https://docs.oracle.com/cd/E23824_01/html/821-1448/gayne.html
      # https://www.hiroom2.com/2016/05/18/ubuntu-16-04-share-zfs-storage-via-nfs-smb/
      shareStrategy: "setDatasetProperties"
      shareStrategySetDatasetProperties:
        properties:
          #sharenfs: "rw,no_subtree_check,no_root_squash"
          sharenfs: "on"
          # share: ""
      shareHost: "driveripper.reeselink.com"
gitea-staging-values.yaml (new file): 75 lines
@@ -0,0 +1,75 @@
image:
  tag: 1.21.4

ingress:
  enabled: true
  annotations:
    cert-manager.io/cluster-issuer: letsencrypt
    kubernetes.io/ingress.class: nginx
    nginx.ingress.kubernetes.io/proxy-body-size: "0"
    nginx.org/client-max-body-size: "0"
  hosts:
    - host: gitea-staging.reeseapps.com
      paths:
        - path: /
          pathType: Prefix
  tls:
    - hosts:
        - gitea-staging.reeseapps.com
      secretName: gitea-staging-tls-cert

persistence:
  enabled: true
  create: true
  storageClass: zfs-iscsi-enc0
  claimName: data-gitea-staging-0
  annotations:
    "helm.sh/resource-policy": keep

global:
  storageClass: zfs-iscsi-enc1

postgresql:
  enabled: true
  image:
    tag: 15
  primary:
    persistence:
      enabled: true
      storageClass: zfs-iscsi-enc1
      annotations:
        "helm.sh/resource-policy": keep

postgresql-ha:
  enabled: false

gitea:
  admin:
    existingSecret: gitea-admin-secret
  config:
    service:
      DISABLE_REGISTRATION: true

service:
  ssh:
    port: 22
    type: ClusterIP

redis-cluster:
  enabled: true
  image:
    tag: 7.2

deployment:
  tolerations:
    - key: "node.kubernetes.io/unreachable"
      operator: "Exists"
      effect: "NoExecute"
      tolerationSeconds: 1
    - key: "node.kubernetes.io/not-ready"
      operator: "Exists"
      effect: "NoExecute"
      tolerationSeconds: 1

strategy:
  type: Recreate
@@ -1,3 +1,6 @@
+image:
+  tag: 1.21.4
+
 ingress:
   enabled: true
   annotations:
@@ -28,6 +31,8 @@ global:

 postgresql:
   enabled: true
+  image:
+    tag: 15
   primary:
     persistence:
       enabled: true
@@ -56,6 +61,8 @@ service:

 redis-cluster:
   enabled: true
+  image:
+    tag: 7.2

 deployment:
   tolerations:
hass_trackers/README.md (new file): 12 lines
@@ -0,0 +1,12 @@
# HASS Trackers Ansible

## Installation

You'll need to ssh into home assistant and run `apk add python3`

Check vars.yaml to edit your trackers.

```bash
ansible-playbook -i ansible/inventory.yaml hass_trackers/install_trackers.yaml
ansible-playbook -i ansible/inventory.yaml hass_trackers/create_dashboards.yaml
```
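The playbooks above target the `hass` inventory group added in this commit. A quick hedged sanity check before running them, using Ansible's built-in ping module and assuming SSH access to the `homeassistant` host is already configured:

```bash
# Confirm Ansible can reach the Home Assistant host and find its Python interpreter
ansible -i ansible/inventory.yaml hass -m ping
```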
hass_trackers/dashboard.yaml (new file): 300 lines
@@ -0,0 +1,300 @@
type: grid
square: false
columns: 1
cards:
  - type: markdown
    content: >
      {% set trackers = [
      {
        "counter": states("counter.days_since_fridge_water_filter") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_fridge_water_filter", "upper"),
        "percent": (int(states("counter.days_since_fridge_water_filter")) / state_attr("binary_sensor.threshold_for_days_since_fridge_water_filter", "upper")),
        "name": "Fridge Water Filter Replaced"
      },
      {
        "counter": states("counter.days_since_litter_boxes_cleaned") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_litter_boxes_cleaned", "upper"),
        "percent": (int(states("counter.days_since_litter_boxes_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_litter_boxes_cleaned", "upper")),
        "name": "Litter Boxes Cleaned"
      },
      {
        "counter": states("counter.days_since_cat_water_fountain_cleaned") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_cat_water_fountain_cleaned", "upper"),
        "percent": (int(states("counter.days_since_cat_water_fountain_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_cat_water_fountain_cleaned", "upper")),
        "name": "Cat Water Fountain Cleaned"
      },
      {
        "counter": states("counter.days_since_kitchen_cleaned") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_kitchen_cleaned", "upper"),
        "percent": (int(states("counter.days_since_kitchen_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_kitchen_cleaned", "upper")),
        "name": "Kitchen Cleaned"
      },
      {
        "counter": states("counter.days_since_kitchen_counters_cleaned") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_kitchen_counters_cleaned", "upper"),
        "percent": (int(states("counter.days_since_kitchen_counters_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_kitchen_counters_cleaned", "upper")),
        "name": "Kitchen Counters Cleaned"
      },
      {
        "counter": states("counter.days_since_living_room_cleaned") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_living_room_cleaned", "upper"),
        "percent": (int(states("counter.days_since_living_room_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_living_room_cleaned", "upper")),
        "name": "Living Room Cleaned"
      },
      {
        "counter": states("counter.days_since_upstairs_vacuumed") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_upstairs_vacuumed", "upper"),
        "percent": (int(states("counter.days_since_upstairs_vacuumed")) / state_attr("binary_sensor.threshold_for_days_since_upstairs_vacuumed", "upper")),
        "name": "Upstairs Vacuumed"
      },
      {
        "counter": states("counter.days_since_bathroom_counters_cleaned") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_bathroom_counters_cleaned", "upper"),
        "percent": (int(states("counter.days_since_bathroom_counters_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_bathroom_counters_cleaned", "upper")),
        "name": "Bathroom Counters Cleaned"
      },
      {
        "counter": states("counter.days_since_bedroom_vacuumed") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_bedroom_vacuumed", "upper"),
        "percent": (int(states("counter.days_since_bedroom_vacuumed")) / state_attr("binary_sensor.threshold_for_days_since_bedroom_vacuumed", "upper")),
        "name": "Bedroom Vacuumed"
      },
      {
        "counter": states("counter.days_since_toilets_cleaned") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_toilets_cleaned", "upper"),
        "percent": (int(states("counter.days_since_toilets_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_toilets_cleaned", "upper")),
        "name": "Toilets Cleaned"
      },
      ]%}


      {% for tracker in (trackers | sort(reverse=true, attribute='percent')) %}
      {% set days_left = ((tracker.threshold - tracker.counter) | int | string) %}
      {% set message = tracker.name + " completed " + tracker.counter | string + " days ago. Due again in " + days_left + " days." %}
      {% if tracker.percent > 1 %}
      <ha-alert alert-type="error">{{ message }}</ha-alert>
      {% elif tracker.percent > 0.8 %}
      <ha-alert alert-type="warning">{{ message }}</ha-alert>
      {% else %}
      <ha-alert alert-type="success">{{ message }}</ha-alert>
      {% endif %}
      {% endfor %}
  - type: grid
    square: false
    columns: 2
    title: Days Since Fridge Water Filter Replaced
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_fridge_water_filter
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_fridge_water_filter
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_fridge_water_filter
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_fridge_water_filter
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Litter Boxes Cleaned
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_litter_boxes_cleaned
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_litter_boxes_cleaned
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_litter_boxes_cleaned
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_litter_boxes_cleaned
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Cat Water Fountain Cleaned
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_cat_water_fountain_cleaned
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_cat_water_fountain_cleaned
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_cat_water_fountain_cleaned
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_cat_water_fountain_cleaned
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Kitchen Cleaned
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_kitchen_cleaned
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_kitchen_cleaned
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_kitchen_cleaned
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_kitchen_cleaned
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Kitchen Counters Cleaned
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_kitchen_counters_cleaned
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_kitchen_counters_cleaned
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_kitchen_counters_cleaned
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_kitchen_counters_cleaned
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Living Room Cleaned
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_living_room_cleaned
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_living_room_cleaned
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_living_room_cleaned
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_living_room_cleaned
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Upstairs Vacuumed
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_upstairs_vacuumed
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_upstairs_vacuumed
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_upstairs_vacuumed
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_upstairs_vacuumed
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Bathroom Counters Cleaned
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_bathroom_counters_cleaned
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_bathroom_counters_cleaned
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_bathroom_counters_cleaned
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_bathroom_counters_cleaned
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Bedroom Vacuumed
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_bedroom_vacuumed
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_bedroom_vacuumed
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_bedroom_vacuumed
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_bedroom_vacuumed
        name: Last Non-Zero

  - type: grid
    square: false
    columns: 2
    title: Days Since Toilets Cleaned
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_toilets_cleaned
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_toilets_cleaned
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_toilets_cleaned
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_toilets_cleaned
        name: Last Non-Zero
@@ -1,167 +0,0 @@ (file deleted)

Counters:

```yaml
days_since_fridge_filter_replaced:
  name: days_since_fridge_filter_replaced
  restore: true
  initial: 0
  step: 1
  icon: mdi:fridge
last_non_zero_days_since_fridge_filter_replaced:
  name: last_non_zero_days_since_fridge_filter_replaced
  restore: true
  initial: 0
  step: 1
  icon: mdi:fridge
```

Input Buttons:

```yaml
reset_days_since_fridge_filter_replaced:
  name: reset_days_since_fridge_filter_replaced
  icon: mdi:fridge
undo_reset_days_since_fridge_filter_replaced:
  name: undo_reset_days_since_fridge_filter_replaced
  icon: mdi:undo
```

Automations:

```yaml
- alias: 'Counter: Increment days_since_fridge_filter_replaced'
  description: ''
  trigger:
    - platform: time
      at: 00:00:00
  condition: []
  action:
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.days_since_fridge_filter_replaced
  mode: single
- alias: 'Counter: Reset days_since_fridge_filter_replaced'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - input_button.reset_days_since_fridge_filter_replaced
  condition: []
  action:
    - service: counter.reset
      metadata: {}
      data: {}
      target:
        entity_id: counter.days_since_fridge_filter_replaced
  mode: single
- alias: 'Counter: Update last_non_zero_days_since_fridge_filter_replaced'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - counter.days_since_fridge_filter_replaced
  condition:
    - condition: numeric_state
      entity_id: counter.days_since_fridge_filter_replaced
      above: 0
  action:
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_fridge_filter_replaced
  mode: single
- alias: 'Counter: Reset last_non_zero_days_since_fridge_filter_replaced'
  description: ''
  trigger:
    - platform: numeric_state
      entity_id:
        - counter.days_since_fridge_filter_replaced
      above: 0
      below: 2
  condition: []
  action:
    - service: counter.reset
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_fridge_filter_replaced
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_fridge_filter_replaced
  mode: single
- alias: 'Counter: Undo Reset days_since_fridge_filter_replaced'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - input_button.undo_reset_days_since_fridge_filter_replaced
  condition:
    - condition: numeric_state
      entity_id: counter.days_since_fridge_filter_replaced
      below: 1
  action:
    - service: counter.set_value
      metadata: {}
      data:
        value: "{{ int(states('counter.last_non_zero_days_since_fridge_filter_replaced'))}}"
      target:
        entity_id: counter.days_since_fridge_filter_replaced
    - service: counter.decrement
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_fridge_filter_replaced
  mode: single
```

Dashboard:

```yaml
title: Days Since Fridge Filter Replaced
type: grid
square: false
columns: 1
cards:
  - type: grid
    square: false
    columns: 2
    cards:
      - name: Reset
        show_name: true
        show_icon: true
        type: button
        tap_action:
          action: toggle
        entity: input_button.reset_days_since_fridge_filter_replaced
        show_state: true
      - name: Undo
        show_name: true
        show_icon: true
        type: button
        tap_action:
          action: toggle
        entity: input_button.undo_reset_days_since_fridge_filter_replaced
        show_state: true
  - type: grid
    square: false
    columns: 2
    cards:
      - type: entity
        entity: counter.days_since_fridge_filter_replaced
        state_color: false
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_fridge_filter_replaced
        state_color: false
        name: Last Value
      - type: history-graph
        hours_to_show: 72
        entities:
          - counter.days_since_fridge_filter_replaced
```
hass_trackers/install_trackers.yaml (new file): 68 lines
@@ -0,0 +1,68 @@
- name: Install HASS Trackers
  hosts: hass
  vars_files:
    - vars.yaml
  tasks:
    - name: Create counters dir
      ansible.builtin.file:
        path: /root/config/counters
        state: directory
        mode: '0755'
    - name: Create binary sensors dir
      ansible.builtin.file:
        path: /root/config/binary_sensors
        state: directory
        mode: '0755'
    - name: Create input buttons dir
      ansible.builtin.file:
        path: /root/config/input_buttons
        state: directory
        mode: '0755'
    - name: Create automations dir
      ansible.builtin.file:
        path: /root/config/automations
        state: directory
        mode: '0755'

    - name: Template all counters
      template:
        src: templates/counter.yaml
        dest: /root/config/counters/{{ item.id }}.yaml
        owner: root
        group: root
        mode: '0644'
      with_items: "{{ trackers }}"
    - name: Template all thresholds
      template:
        src: templates/threshold.yaml
        dest: /root/config/binary_sensors/{{ item.id }}.yaml
        owner: root
        group: root
        mode: '0644'
      with_items: "{{ trackers }}"
    - name: Template all input buttons
      template:
        src: templates/input_button.yaml
        dest: /root/config/input_buttons/{{ item.id }}.yaml
        owner: root
        group: root
        mode: '0644'
      with_items: "{{ trackers }}"
    - name: Template all automations
      template:
        src: templates/automation.yaml
        dest: /root/config/automations/{{ item.id }}.yaml
        owner: root
        group: root
        mode: '0644'
      with_items: "{{ trackers }}"

- name: Install HASS Trackers
  hosts: localhost
  vars_files:
    - vars.yaml
  tasks:
    - name: Template Dashboard
      template:
        src: templates/dashboard.yaml
        dest: "dashboard.yaml"
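Before pushing these templates to the live Home Assistant config, a hedged way to preview the result is Ansible's built-in check and diff mode; no assumptions beyond the paths already used above:

```bash
# Dry run: show what would be templated into /root/config without changing anything
ansible-playbook -i ansible/inventory.yaml hass_trackers/install_trackers.yaml --check --diff
```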
@@ -1,167 +0,0 @@ (file deleted)

Counters:

```yaml
days_since_litter_box_changed:
  name: days_since_litter_box_changed
  restore: true
  initial: 0
  step: 1
  icon: mdi:cat
last_non_zero_days_since_litter_box_changed:
  name: last_non_zero_days_since_litter_box_changed
  restore: true
  initial: 0
  step: 1
  icon: mdi:undo
```

Input Buttons:

```yaml
reset_days_since_litter_box_changed:
  name: reset_days_since_litter_box_changed
  icon: mdi:cat
undo_reset_days_since_litter_box_changed:
  name: undo_reset_days_since_litter_box_changed
  icon: mdi:undo
```

Automations:

```yaml
- alias: 'Counter: Increment days_since_litter_box_changed'
  description: ''
  trigger:
    - platform: time
      at: 00:00:00
  condition: []
  action:
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.days_since_litter_box_changed
  mode: single
- alias: 'Counter: Reset days_since_litter_box_changed'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - input_button.reset_days_since_litter_box_changed
  condition: []
  action:
    - service: counter.reset
      metadata: {}
      data: {}
      target:
        entity_id: counter.days_since_litter_box_changed
  mode: single
- alias: 'Counter: Update last_non_zero_days_since_litter_box_changed'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - counter.days_since_litter_box_changed
  condition:
    - condition: numeric_state
      entity_id: counter.days_since_litter_box_changed
      above: 0
  action:
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_litter_box_changed
  mode: single
- alias: 'Counter: Reset last_non_zero_days_since_litter_box_changed'
  description: ''
  trigger:
    - platform: numeric_state
      entity_id:
        - counter.days_since_litter_box_changed
      above: 0
      below: 2
  condition: []
  action:
    - service: counter.reset
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_litter_box_changed
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_litter_box_changed
  mode: single
- alias: 'Counter: Undo Reset days_since_litter_box_changed'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - input_button.undo_reset_days_since_litter_box_changed
  condition:
    - condition: numeric_state
      entity_id: counter.days_since_litter_box_changed
      below: 1
  action:
    - service: counter.set_value
      metadata: {}
      data:
        value: "{{ int(states('counter.last_non_zero_days_since_litter_box_changed'))}}"
      target:
        entity_id: counter.days_since_litter_box_changed
    - service: counter.decrement
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_litter_box_changed
  mode: single
```

Dashboard:

```yaml
title: REPLACE ME
type: grid
square: false
columns: 1
cards:
  - type: grid
    square: false
    columns: 2
    cards:
      - show_name: true
        show_icon: true
        type: button
        tap_action:
          action: toggle
        entity: input_button.reset_days_since_litter_box_changed
        show_state: true
        name: Reset
      - show_name: true
        show_icon: true
        type: button
        tap_action:
          action: toggle
        entity: input_button.undo_reset_days_since_litter_box_changed
        show_state: true
        name: Undo
  - type: grid
    square: false
    columns: 2
    cards:
      - type: entity
        entity: counter.days_since_litter_box_changed
        state_color: false
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_litter_box_changed
        state_color: false
        name: Last Value
      - type: history-graph
        hours_to_show: 72
        entities:
          - counter.days_since_litter_box_changed
```
hass_trackers/markdown_dashboard.yaml (new file): 39 lines
@@ -0,0 +1,39 @@
{% set trackers = [
{
  "counter": states("counter.days_since_fridge_water_filter") | int,
  "threshold": state_attr("binary_sensor.threshold_for_days_since_fridge_water_filter", "upper"),
  "percent": (int(states("counter.days_since_fridge_water_filter")) / state_attr("binary_sensor.threshold_for_days_since_fridge_water_filter", "upper")),
  "name": "Fridge Water Filter Replaced"
},
{
  "counter": states("counter.days_since_litter_boxes_cleaned") | int,
  "threshold": state_attr("binary_sensor.threshold_for_days_since_litter_boxes_cleaned", "upper"),
  "percent": (int(states("counter.days_since_litter_boxes_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_litter_boxes_cleaned", "upper")),
  "name": "Litter Boxes Cleaned"
},
{
  "counter": states("counter.days_since_cat_water_fountain_cleaned") | int,
  "threshold": state_attr("binary_sensor.threshold_for_days_since_cat_water_fountain_cleaned", "upper"),
  "percent": (int(states("counter.days_since_cat_water_fountain_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_cat_water_fountain_cleaned", "upper")),
  "name": "Cat Water Fountain Cleaned"
},
{
  "counter": states("counter.days_since_kitchen_cleaned") | int,
  "threshold": state_attr("binary_sensor.threshold_for_days_since_kitchen_cleaned", "upper"),
  "percent": (int(states("counter.days_since_kitchen_cleaned")) / state_attr("binary_sensor.threshold_for_days_since_kitchen_cleaned", "upper")),
  "name": "Kitchen Cleaned"
},
]%}


{% for tracker in (trackers | sort(reverse=true, attribute='percent')) %}
{% set days_left = ((tracker.threshold - tracker.counter) | int | string) %}
{% set message = tracker.name + " completed " + tracker.counter | string + " days ago. Due again in " + days_left + " days." %}
{% if tracker.percent > 1 %}
<ha-alert alert-type="error">{{ message }}</ha-alert>
{% elif tracker.percent > 0.8 %}
<ha-alert alert-type="warning">{{ message }}</ha-alert>
{% else %}
<ha-alert alert-type="success">{{ message }}</ha-alert>
{% endif %}
{% endfor %}
hass_trackers/templates/automation.yaml (new file): 88 lines
@@ -0,0 +1,88 @@
- alias: 'Counter: Increment {{ item.id }}'
  description: ''
  trigger:
    - platform: time
      at: 00:00:00
  condition: []
  action:
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.days_since_{{ item.id }}
  mode: single
- alias: 'Counter: Reset {{ item.id }}'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - input_button.reset_days_since_{{ item.id }}
  condition: []
  action:
    - service: counter.reset
      metadata: {}
      data: {}
      target:
        entity_id: counter.days_since_{{ item.id }}
  mode: single
- alias: 'Counter: Update last_non_zero_days_since_{{ item.id }}'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - counter.days_since_{{ item.id }}
  condition:
    - condition: numeric_state
      entity_id: counter.days_since_{{ item.id }}
      above: 0
  action:
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_{{ item.id }}
  mode: single
- alias: 'Counter: Reset last_non_zero_days_since_{{ item.id }}'
  description: ''
  trigger:
    - platform: numeric_state
      entity_id:
        - counter.days_since_{{ item.id }}
      above: 0
      below: 2
  condition: []
  action:
    - service: counter.reset
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_{{ item.id }}
    - service: counter.increment
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_{{ item.id }}
  mode: single
- alias: 'Counter: Undo Reset {{ item.id }}'
  description: ''
  trigger:
    - platform: state
      entity_id:
        - input_button.undo_reset_days_since_{{ item.id }}
  condition:
    - condition: numeric_state
      entity_id: counter.days_since_{{ item.id }}
      below: 1
  action:
    - service: counter.set_value
      metadata: {}
      data:
        value: "{{ '{{' }} int(states('counter.last_non_zero_days_since_{{ item.id }}')) {{ '}}' }}"
      target:
        entity_id: counter.days_since_{{ item.id }}
    - service: counter.decrement
      metadata: {}
      data: {}
      target:
        entity_id: counter.last_non_zero_days_since_{{ item.id }}
  mode: single
hass_trackers/templates/counter.yaml (new file): 12 lines
@@ -0,0 +1,12 @@
days_since_{{ item.id }}:
  name: Days Since {{ item.name }}
  restore: true
  initial: 0
  step: 1
  icon: {{ item.icon }}
last_non_zero_days_since_{{ item.id }}:
  name: Last Non-Zero Days Since {{ item.name }}
  restore: true
  initial: 0
  step: 1
  icon: mdi:undo
hass_trackers/templates/dashboard.yaml (new file): 54 lines
@@ -0,0 +1,54 @@
type: grid
square: false
columns: 1
cards:
  - type: markdown
    content: >
      {{ '{%' }} set trackers = [
      {% for item in trackers -%}
      {
        "counter": states("counter.days_since_{{ item.id }}") | int,
        "threshold": state_attr("binary_sensor.threshold_for_days_since_{{ item.id }}", "upper"),
        "percent": (int(states("counter.days_since_{{ item.id }}")) / state_attr("binary_sensor.threshold_for_days_since_{{ item.id }}", "upper")),
        "name": "{{ item.name }}"
      },
      {% endfor -%}
      ]{{ '%} ' }}

      {% raw %}
      {% for tracker in (trackers | sort(reverse=true, attribute='percent')) %}
      {% set days_left = ((tracker.threshold - tracker.counter) | int | string) %}
      {% set message = tracker.name + " completed " + tracker.counter | string + " days ago. Due again in " + days_left + " days." %}
      {% if tracker.percent > 1 %}
      <ha-alert alert-type="error">{{ message }}</ha-alert>
      {% elif tracker.percent > 0.8 %}
      <ha-alert alert-type="warning">{{ message }}</ha-alert>
      {% else %}
      <ha-alert alert-type="success">{{ message }}</ha-alert>
      {% endif %}
      {% endfor %}
      {%- endraw %}
  {% for item in trackers %}

  - type: grid
    square: false
    columns: 2
    title: Days Since {{ item.name }}
    cards:
      - type: button
        name: Reset
        entity: input_button.reset_days_since_{{ item.id }}
        tap_action:
          action: toggle
      - type: button
        entity: input_button.undo_reset_days_since_{{ item.id }}
        name: Undo
        tap_action:
          action: toggle
      - type: entity
        entity: counter.days_since_{{ item.id }}
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_{{ item.id }}
        name: Last Non-Zero
  {% endfor %}
hass_trackers/templates/input_button.yaml (new file): 6 lines
@@ -0,0 +1,6 @@
reset_days_since_{{ item.id }}:
  name: Reset {{ item.name }}
  icon: {{ item.icon }}
undo_reset_days_since_{{ item.id }}:
  name: Undo Reset {{ item.name }}
  icon: mdi:undo
hass_trackers/templates/threshold.yaml (new file): 4 lines
@@ -0,0 +1,4 @@
- platform: threshold
  entity_id: counter.days_since_{{ item.id }}
  upper: {{ item.check_every }}
  name: threshold_for_days_since_{{ item.id }}
hass_trackers/vars.yaml (new file): 41 lines
@@ -0,0 +1,41 @@
trackers:
  - id: fridge_water_filter
    name: Fridge Water Filter Replaced
    check_every: 180
    icon: mdi:fridge
  - id: litter_boxes_cleaned
    name: Litter Boxes Cleaned
    check_every: 15
    icon: mdi:cat
  - id: cat_water_fountain_cleaned
    name: Cat Water Fountain Cleaned
    check_every: 7
    icon: mdi:fountain
  - id: kitchen_cleaned
    name: Kitchen Cleaned
    check_every: 7
    icon: mdi:fridge
  - id: kitchen_counters_cleaned
    name: Kitchen Counters Cleaned
    check_every: 7
    icon: mdi:countertop
  - id: living_room_cleaned
    name: Living Room Cleaned
    check_every: 7
    icon: mdi:sofa
  - id: upstairs_vacuumed
    name: Upstairs Vacuumed
    check_every: 14
    icon: mdi:vacuum
  - id: bathroom_counters_cleaned
    name: Bathroom Counters Cleaned
    check_every: 14
    icon: mdi:bathtub
  - id: bedroom_vacuumed
    name: Bedroom Vacuumed
    check_every: 14
    icon: mdi:vacuum
  - id: toilets_cleaned
    name: Toilets Cleaned
    check_every: 14
    icon: mdi:toilet
@@ -14,10 +14,10 @@ server_props: |
     broadcast-console-to-ops=true
     enable-query=false
     player-idle-timeout=0
-    difficulty=2
+    difficulty=3
     spawn-monsters=true
     op-permission-level=4
-    pvp=true
+    pvp=false
     snooper-enabled=true
     level-type=amplified
     hardcore=false
@@ -41,5 +41,5 @@ server_props: |
     level-seed=
     prevent-proxy-connections=false
     use-native-transport=true
-    motd=Welcome to Nimcraft!
+    motd=Courniiiiiiieeeeeeeeee
     enable-rcon=false
@@ -104,9 +104,7 @@ spec:
             claimName: {{ .Release.Name }}-postgres-iops
         - name: redis
           emptyDir:
-            sizeLimit: 2Gi
-          # persistentVolumeClaim:
-          #   claimName: {{ .Release.Name }}-redis-iops
+            sizeLimit: 500Mi
        - name: postgres-init
          secret:
            secretName: {{ .Release.Name }}-postgres-init
@@ -1,5 +1,5 @@
 nextcloud:
-  image: nextcloud:27.1.2
+  image: nextcloud:28.0.1
   domain: nextcloud.reeseapps.com
   html:
     storageClassName: zfs-iscsi-enc1
@@ -16,6 +16,6 @@ postgres:
   password: ""

 redis:
-  storageClassName: zfs-iscsi-enc1
+  storageClassName: zfs-nfs-enc1
   storage: 32Gi
   password: ""
@@ -7,6 +7,8 @@
 - [Philips Hue Lights](#philips-hue-lights)
 - [Shelly](#shelly)
 - [Trackers](#trackers)
+- [Looping Over Entities with Labels](#looping-over-entities-with-labels)
+- [Get All Entity Attributes](#get-all-entity-attributes)

 ## Setup and Configuration

@@ -51,176 +53,31 @@ hue lights support color_temp in mireds, here are some mired-kelvin conversions:
|
|||||||
|
|
||||||
1. Outbound Websocket `wss://homeassistant.reeseapps.com/api/shelly/ws`
|
1. Outbound Websocket `wss://homeassistant.reeseapps.com/api/shelly/ws`
|
||||||
|
|
||||||
|
Shelly devices can act as "passive" or "active" bluetooth scanners. Both of these configurations
|
||||||
|
allow home assistant to proxy bluetooth connections through shelly devices, significantly extending
|
||||||
|
the range of your home assistant's bluetooth capabilities. Active scanning uses more power but
|
||||||
|
is quicker to pick up and transmit device information. Note that "gateway mode" is not required,
|
||||||
|
just enable bluetooth and rpc or select "active" from the configuration menu for the shelly
|
||||||
|
device.
|
||||||
|
|
||||||
### Trackers
|
### Trackers
|
||||||
|
|
||||||
Trackers allow for a simple "days since x" Setup with a reset and undo button.
|
See `hass_trackers/`
|
||||||
|
|
||||||
Copy each of these into a text editor and find/replace "days_since_litter_box_changed"
|
### Looping Over Entities with Labels
|
||||||
with the name of the entity you want to track
|
|
||||||
|
|
||||||
Counters:
|
<https://www.home-assistant.io/docs/configuration/templating/#labels>
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
days_since_litter_box_changed:
|
{% for item in label_entities("Battery Level") -%}
|
||||||
name: days_since_litter_box_changed
|
- {{ item }}
|
||||||
restore: true
|
{% endfor %}
|
||||||
initial: 0
|
|
||||||
step: 1
|
|
||||||
icon: mdi:cat
|
|
||||||
last_non_zero_days_since_litter_box_changed:
|
|
||||||
name: last_non_zero_days_since_litter_box_changed
|
|
||||||
restore: true
|
|
||||||
initial: 0
|
|
||||||
step: 1
|
|
||||||
icon: mdi:undo
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Input Buttons:
|
### Get All Entity Attributes
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
reset_days_since_litter_box_changed:
|
{% for item in label_entities("Battery Level") -%}
|
||||||
name: reset_days_since_litter_box_changed
|
- {{ states[item].attributes }}
|
||||||
icon: mdi:cat
|
{% endfor %}
|
||||||
undo_reset_days_since_litter_box_changed:
|
|
||||||
name: undo_reset_days_since_litter_box_changed
|
|
||||||
icon: mdi:undo
|
|
||||||
```
|
|
||||||
|
|
||||||
Automations:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
- alias: 'Counter: Increment days_since_litter_box_changed'
|
|
||||||
description: ''
|
|
||||||
trigger:
|
|
||||||
- platform: time
|
|
||||||
at: 00:00:00
|
|
||||||
condition: []
|
|
||||||
action:
|
|
||||||
- service: counter.increment
|
|
||||||
metadata: {}
|
|
||||||
data: {}
|
|
||||||
target:
|
|
||||||
entity_id: counter.days_since_litter_box_changed
|
|
||||||
mode: single
|
|
||||||
- alias: 'Counter: Reset days_since_litter_box_changed'
|
|
||||||
description: ''
|
|
||||||
trigger:
|
|
||||||
- platform: state
|
|
||||||
entity_id:
|
|
||||||
- input_button.reset_days_since_litter_box_changed
|
|
||||||
condition: []
|
|
||||||
action:
|
|
||||||
- service: counter.reset
|
|
||||||
metadata: {}
|
|
||||||
data: {}
|
|
||||||
target:
|
|
||||||
entity_id: counter.days_since_litter_box_changed
|
|
||||||
mode: single
|
|
||||||
- alias: 'Counter: Update last_non_zero_days_since_litter_box_changed'
|
|
||||||
description: ''
|
|
||||||
trigger:
|
|
||||||
- platform: state
|
|
||||||
entity_id:
|
|
||||||
- counter.days_since_litter_box_changed
|
|
||||||
condition:
|
|
||||||
- condition: numeric_state
|
|
||||||
entity_id: counter.days_since_litter_box_changed
|
|
||||||
above: 0
|
|
||||||
action:
|
|
||||||
- service: counter.increment
|
|
||||||
metadata: {}
|
|
||||||
data: {}
|
|
||||||
target:
|
|
||||||
entity_id: counter.last_non_zero_days_since_litter_box_changed
|
|
||||||
mode: single
|
|
||||||
- alias: 'Counter: Reset last_non_zero_days_since_litter_box_changed'
|
|
||||||
description: ''
|
|
||||||
trigger:
|
|
||||||
- platform: numeric_state
|
|
||||||
entity_id:
|
|
||||||
- counter.days_since_litter_box_changed
|
|
||||||
above: 0
|
|
||||||
below: 2
|
|
||||||
condition: []
|
|
||||||
action:
|
|
||||||
- service: counter.reset
|
|
||||||
metadata: {}
|
|
||||||
data: {}
|
|
||||||
target:
|
|
||||||
entity_id: counter.last_non_zero_days_since_litter_box_changed
|
|
||||||
- service: counter.increment
|
|
||||||
metadata: {}
|
|
||||||
data: {}
|
|
||||||
target:
|
|
||||||
entity_id: counter.last_non_zero_days_since_litter_box_changed
|
|
||||||
mode: single
|
|
||||||
- alias: 'Counter: Undo Reset days_since_litter_box_changed'
|
|
||||||
description: ''
|
|
||||||
trigger:
|
|
||||||
- platform: state
|
|
||||||
entity_id:
|
|
||||||
- input_button.undo_reset_days_since_litter_box_changed
|
|
||||||
condition:
|
|
||||||
- condition: numeric_state
|
|
||||||
entity_id: counter.days_since_litter_box_changed
|
|
||||||
below: 1
|
|
||||||
action:
|
|
||||||
- service: counter.set_value
|
|
||||||
metadata: {}
|
|
||||||
data:
|
|
||||||
value: "{{ int(states('counter.last_non_zero_days_since_litter_box_changed'))}}"
|
|
||||||
target:
|
|
||||||
entity_id: counter.days_since_litter_box_changed
|
|
||||||
- service: counter.decrement
|
|
||||||
metadata: {}
|
|
||||||
data: {}
|
|
||||||
target:
|
|
||||||
entity_id: counter.last_non_zero_days_since_litter_box_changed
|
|
||||||
mode: single
|
|
||||||
```
|
|
||||||
|
|
||||||
Dashboard:

```yaml
title: REPLACE ME
type: grid
square: false
columns: 1
cards:
  - type: grid
    square: false
    columns: 2
    cards:
      - show_name: true
        show_icon: true
        type: button
        tap_action:
          action: toggle
        entity: input_button.reset_days_since_litter_box_changed
        show_state: true
        name: Reset
      - show_name: true
        show_icon: true
        type: button
        tap_action:
          action: toggle
        entity: input_button.undo_reset_days_since_litter_box_changed
        show_state: true
        name: Undo
  - type: grid
    square: false
    columns: 2
    cards:
      - type: entity
        entity: counter.days_since_litter_box_changed
        state_color: false
        name: Days Since
      - type: entity
        entity: counter.last_non_zero_days_since_litter_box_changed
        state_color: false
        name: Last Value
  - type: history-graph
    hours_to_show: 72
    entities:
      - counter.days_since_litter_box_changed
```

### Get All Entity Attributes

```yaml
{% for item in label_entities("Battery Level") -%}
- {{ states[item].attributes }}
{% endfor %}
```
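The template above dumps every attribute of each entity carrying the "Battery Level" label. A variation for a quicker readout in Developer Tools > Template, assuming those entities expose a battery_level attribute (the attribute name is an assumption):

```yaml
{% for item in label_entities("Battery Level") -%}
- {{ item }}: {{ state_attr(item, 'battery_level') }}
{% endfor %}
```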
nextcloud/README.md (new file, 22 lines)
@@ -0,0 +1,22 @@
# Nextcloud AIO

## Prereq

1. Have a reverse proxy pointing at your server
2. Have a valid certificate

## Setup

```bash
docker run \
  --init \
  --sig-proxy=false \
  --name nextcloud-aio-mastercontainer \
  --restart always \
  --publish 8080:8080 \
  --env APACHE_PORT=11000 \
  --env APACHE_IP_BINDING=0.0.0.0 \
  --volume nextcloud_aio_mastercontainer:/mnt/docker-aio-config \
  --volume /var/run/docker.sock:/var/run/docker.sock:ro \
  nextcloud/all-in-one:latest
```
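For reference, the reverse proxy prerequisite lines up with the nextcloud-aio entry in nginx/vars.yaml later in this commit, which forwards the external domain to the APACHE_PORT chosen above:

```yaml
- external:
    domain: nextcloud-aio.reeseapps.com
    port: 443
  internal:
    domain: nextcloud-aio.reeselink.com
    port: 11000
  protocol: http
```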
@@ -5,8 +5,9 @@
Check vars.yaml to edit your servers.

```bash
# Run certbot first to ensure certs exist
ansible-playbook -i ansible/inventory.yaml nginx/certbot.yaml
ansible-playbook -i ansible/inventory.yaml nginx/nginx.yaml
```

## Restricted Addresses
@@ -1,8 +0,0 @@
- name: Generate placeholder letsencrypt certs for domains if needed
  block:
    - name: check if fullchain already exists
      stat: path=/etc/letsencrypt/live/{{ item.external }}/fullchain.pem
      register: p
    - name: Generate self signed cert
      shell: openssl req -x509 -newkey rsa:4096 -keyout /etc/letsencrypt/live/{{ item.external }}/privkey.pem -out /etc/letsencrypt/live/{{ item.external }}/fullchain.pem -sha256 -days 3650 -nodes -subj "/C=US/ST=Ohio/L=Columbus/O=ducoterra/OU=ducoterra/CN={{ item.external }}"
      when: not p.stat.exists
@@ -16,9 +16,12 @@
      ansible.builtin.systemd_service:
        state: stopped
        name: nginx
    - name: Get certs for all reeseapps domains
      ansible.builtin.shell: /usr/bin/certbot certonly --standalone -d '{{ item.external.domain }}' -n
      loop: "{{ reeseapps }}"
    - name: Get certs for all reeseseal domains
      ansible.builtin.shell: /usr/bin/certbot certonly --dns-route53 -d '{{ item.external.domain }}' -n
      loop: "{{ reeseseal }}"
    - name: Start nginx service
      ansible.builtin.systemd_service:
        state: started
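The --dns-route53 task relies on certbot's route53 plugin, which picks up AWS credentials the standard boto3 way (environment variables or ~/.aws/credentials). A minimal sketch of seeding a credentials file on the nginx host; the default profile and placeholder keys are assumptions:

```bash
mkdir -p ~/.aws
cat > ~/.aws/credentials <<'EOF'
[default]
aws_access_key_id = REPLACE_WITH_KEY_ID
aws_secret_access_key = REPLACE_WITH_SECRET
EOF
chmod 600 ~/.aws/credentials
```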
@@ -6,20 +6,17 @@ map $http_upgrade $connection_upgrade {
server {

    listen 127.0.0.1:443 ssl http2;
    # listen 127.0.0.1:443 ssl; # for nginx v1.25.1+

    server_name {{ item.external.domain }};

    access_log /var/log/nginx/{{ item.external.domain }}-access.log compression;

    # http2 on; # uncomment to enable HTTP/2 - supported on nginx v1.25.1+
    # http3 on; # uncomment to enable HTTP/3 / QUIC - supported on nginx v1.25.0+
    # quic_retry on; # uncomment to enable HTTP/3 / QUIC - supported on nginx v1.25.0+
    # add_header Alt-Svc 'h3=":443"; ma=86400'; # uncomment to enable HTTP/3 / QUIC - supported on nginx v1.25.0+
    # listen 443 quic reuseport; # uncomment to enable HTTP/3 / QUIC - supported on nginx v1.25.0+ - please remove "reuseport" if there is already another quic listener on port 443 with enabled reuseport

    location / {
        resolver 1.1.1.1;
@@ -53,4 +50,14 @@ server {
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305;
    ssl_prefer_server_ciphers on;

    # Optional settings:

    # OCSP stapling
    # ssl_stapling on;
    # ssl_stapling_verify on;
    # ssl_trusted_certificate /etc/letsencrypt/live/<your-nc-domain>/chain.pem;

    # replace with the IP address of your resolver
    # resolver 127.0.0.1; # needed for oscp stapling: e.g. use 94.140.15.15 for adguard / 1.1.1.1 for cloudflared or 8.8.8.8 for google - you can use the same nameserver as listed in your /etc/resolv.conf file
}
@@ -5,14 +5,14 @@ worker_processes 8;
events {}

stream {
    log_format basic '$remote_addr $domain [$time_local] '
                     '$protocol $status $bytes_sent $bytes_received '
                     '$session_time';

    include /etc/nginx/stream.d/*.conf;

    # Map all SSL parsed server names to hosts
    map $ssl_preread_server_name $domain {

        "" 127.0.0.1:443;
@@ -22,26 +22,11 @@ stream {
{% endfor %}

        # For each domain we want to terminate, forward to internal http server
{% for domain in reeseapps %}
        {{ domain.external.domain }} 127.0.0.1:443;
{% endfor %}

{% for domain in reeseseal %}
        {{ domain.external.domain }} 127.0.0.1:443;
{% endfor %}
@@ -53,17 +38,7 @@ stream {
        access_log /var/log/nginx/stream-access-443.log basic;
        listen {{ ansible_default_ipv4.address }}:443;
        resolver 1.1.1.1;
        proxy_pass $domain;
        ssl_preread on;
        proxy_socket_keepalive on;
    }
@@ -55,14 +55,22 @@
        mode: '0644'
      with_fileglob:
        - http.d/*
    - name: Template all reeseapps http configurations
      template:
        src: https.conf
        dest: /etc/nginx/http.d/{{ item.external.domain }}.conf
        owner: root
        group: root
        mode: '0644'
      with_items: "{{ reeseapps }}"
    - name: Template all reeseseal http configurations
      template:
        src: https.conf
        dest: /etc/nginx/http.d/{{ item.external.domain }}.conf
        owner: root
        group: root
        mode: '0644'
      with_items: "{{ reeseseal }}"
    - name: Reload nginx service
      ansible.builtin.systemd_service:
        state: restarted
nginx/vars.yaml (196 changed lines)
@@ -1,89 +1,3 @@
nextcloud:
  domain: nextcloud-aio.reeseapps.com
nginx:
@@ -94,3 +8,113 @@ iperf:
unifi_external:
  domain: unifi-server1.reeselink.com
  internal_ip: 10.1.0.0/16

reeseapps:
  - external:
      domain: truenas.reeseapps.com
      port: 443
    internal:
      domain: driveripper.reeselink.com
      port: 8443
    protocol: https
  - external:
      domain: nextcloud-aio.reeseapps.com
      port: 443
    internal:
      domain: nextcloud-aio.reeselink.com
      port: 11000
    protocol: http
  - external:
      domain: homeassistant.reeseapps.com
      port: 443
    internal:
      domain: homeassistant.reeselink.com
      port: 8123
    protocol: https

reeseseal:
  - external:
      domain: cr10se.reeseseal.com
      port: 443
    internal:
      domain: cr10se.reeselink.com
      port: 80
    protocol: http
  - external:
      domain: hue.reeseseal.com
      port: 443
    internal:
      domain: nginx.reeselink.com
      port: 80
    protocol: http
  - external:
      domain: nextcloud-aio.reeseseal.com
      port: 443
    internal:
      domain: nextcloud-aio.reeselink.com
      port: 11000
    protocol: http
  - external:
      domain: octoprint.reeseseal.com
      port: 443
    internal:
      domain: replicator.reeselink.com
      port: 443
    protocol: https
  - external:
      domain: pihole-yellow.reeseseal.com
      port: 443
    internal:
      domain: yellow.reeselink.com
      port: 8081
    protocol: http
  - external:
      domain: pihole-orange.reeseseal.com
      port: 443
    internal:
      domain: orange.reeselink.com
      port: 8081
    protocol: http
  - external:
      domain: yellow.reeseseal.com
      port: 443
    internal:
      domain: yellow.reeselink.com
      port: 9090
    protocol: https
  - external:
      domain: orange.reeseseal.com
      port: 443
    internal:
      domain: orange.reeselink.com
      port: 9090
    protocol: https
  - external:
      domain: node1.reeseseal.com
      port: 443
    internal:
      domain: node1.reeselink.com
      port: 9090
    protocol: https
  - external:
      domain: node2.reeseseal.com
      port: 443
    internal:
      domain: node2.reeselink.com
      port: 9090
    protocol: https
  - external:
      domain: node3.reeseseal.com
      port: 443
    internal:
      domain: node3.reeselink.com
      port: 9090
    protocol: https

stream_ssl:
  - external:
      domain: containers.reeseapps.com
      port: 443
    internal:
      domain: node1.reeselink.com
      port: 6443
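Adding another proxied service means appending an entry of the same shape to the relevant list. A purely illustrative example; the grafana domain, internal host, and port are hypothetical:

```yaml
# appended under the existing reeseapps: list
- external:
    domain: grafana.reeseapps.com   # hypothetical service
    port: 443
  internal:
    domain: yellow.reeselink.com    # assumed internal host
    port: 3000
  protocol: http
```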
podman.md (59 changed lines)
@@ -1,5 +1,21 @@
# Podman

- [Podman](#podman)
  - [Notes](#notes)
  - [Podman systemd files](#podman-systemd-files)
    - [iperf3](#iperf3)
    - [pihole](#pihole)
    - [Grafana](#grafana)
    - [Nginx](#nginx)
      - [Nginx Build](#nginx-build)
      - [Nginx Run](#nginx-run)
      - [Quadlet Generation](#quadlet-generation)
      - [Update yellow/orange](#update-yelloworange)

## Notes

- podman auth is stored in /run/user/1000/containers
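For example, logging in to a registry is what populates that per-user runtime directory; a quick check, assuming uid 1000 as in the path above:

```bash
podman login docker.io
cat /run/user/1000/containers/auth.json
```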
## Podman systemd files

Rather than copying compose files or running podman run as systemd services you can
@@ -10,7 +26,7 @@ Podlet generates quadlets - systemd files specifically for containers.
You generate quadlets from compose files like so:

```bash
podman run \
  -v ./compose:/compose \
  -v ./quadlets:/quadlets \
  quay.io/k9withabone/podlet \
@@ -62,3 +78,44 @@ podman run \
  --overwrite \
  compose /compose/grafana-compose.yaml
```

### Nginx

nginx proxies all other services.

#### Nginx Build

```bash
podman build -f nginx-stream/Containerfile -t docker.io/ducoterra/nginx-stream:latest
podman build -f dns/Containerfile -t docker.io/ducoterra/nginx-stream-dns:latest

podman push docker.io/ducoterra/nginx-stream:latest
podman push docker.io/ducoterra/nginx-stream-dns:latest

podman-compose -f compose/nginx-compose.yaml up -d
```

#### Nginx Run

```bash
podman-compose -f compose/nginx-compose.yaml up
```

#### Quadlet Generation

```bash
podman run \
  -v ./compose:/compose \
  -v ./quadlets:/quadlets \
  quay.io/k9withabone/podlet \
  -f /quadlets \
  -i \
  --overwrite \
  compose /compose/nginx-stream-compose.yaml
```
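The generated quadlet files are consumed by the systemd generator rather than copied in as plain unit files. A sketch of installing them for a rootless user, using the standard quadlet directory; the unit name depends on the generated .container file:

```bash
mkdir -p ~/.config/containers/systemd
cp quadlets/*.container ~/.config/containers/systemd/
systemctl --user daemon-reload
systemctl --user start nginx-stream.service   # assumed name, derived from nginx-stream.container
```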
#### Update yellow/orange

```bash
ansible-playbook -i ansible/inventory.yaml ansible/update-quadlets.yaml
```
@@ -24,39 +24,15 @@ volumeSnapshotClasses: []

driver:
  config:
    driver: freenas-api-iscsi
    instance_id:
    httpConnection:
      protocol: https
      host: driveripper.reeselink.com
      port: 8443
      apiKey: ""
      allowInsecure: true
    zfs:
      datasetProperties:
        "org.freenas:description": "{{ parameters.[csi.storage.k8s.io/pvc/namespace] }}/{{ parameters.[csi.storage.k8s.io/pvc/name] }}"
@@ -24,39 +24,15 @@ volumeSnapshotClasses: []

driver:
  config:
    driver: freenas-api-iscsi
    instance_id:
    httpConnection:
      protocol: https
      host: driveripper.reeselink.com
      port: 8443
      apiKey: ""
      allowInsecure: true
    zfs:
      datasetProperties:
        "org.freenas:description": "{{ parameters.[csi.storage.k8s.io/pvc/namespace] }}/{{ parameters.[csi.storage.k8s.io/pvc/name] }}"
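Rolling these values out presumably goes through the upstream democratic-csi Helm chart; a sketch with assumed release, namespace, and values-file names:

```bash
helm repo add democratic-csi https://democratic-csi.github.io/charts/
helm repo update
helm upgrade --install zfs-iscsi democratic-csi/democratic-csi \
  --namespace democratic-csi --create-namespace \
  --values freenas-iscsi.yaml   # assumed filename for the values shown above
```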
@@ -1,6 +1,5 @@
csiDriver:
  name: "driveripper.zfs-nfs-enc1"

storageClasses:
  - name: zfs-nfs-enc1
    defaultClass: false
@@ -18,44 +17,18 @@ storageClasses:
      node-stage-secret:
      node-publish-secret:
      controller-expand-secret:

volumeSnapshotClasses: []

driver:
  config:
    driver: freenas-api-nfs
    instance_id:
    httpConnection:
      protocol: https
      host: driveripper.reeselink.com
      port: 8443
      apiKey: ""
      allowInsecure: true
    zfs:
      datasetProperties:
        "org.freenas:description": "{{ parameters.[csi.storage.k8s.io/pvc/namespace] }}/{{ parameters.[csi.storage.k8s.io/pvc/name] }}"
@@ -69,12 +42,11 @@ driver:
      datasetPermissionsUser: 0
      datasetPermissionsGroup: 0
    nfs:
      shareHost: democratic-csi-server.reeselink.com
      shareAlldirs: false
      shareAllowedHosts: []
      shareAllowedNetworks: []
      shareMaprootUser: root
      shareMaprootGroup: root
      shareMapallUser: ""
      shareMapallGroup: ""